diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 98994f4741..a3da1b0d4c 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:2d816f26f728ac8b24248741e7d4c461c09764ef9f7be3684d557c9632e46dbd -# created: 2023-06-28T17:03:33.371210701Z + digest: sha256:3e3800bb100af5d7f9e810d48212b37812c1856d20ffeafb99ebe66461b61fc7 +# created: 2023-08-02T10:53:29.114535628Z diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 279eedf06a..993a4580d7 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -1,8 +1,4 @@ # Code owners file. # This file controls who is tagged for review for any given pull request. -# For syntax help see: -# https://help.github.com/en/github/creating-cloning-and-archiving-repositories/about-code-owners#codeowners-syntax - -* @googleapis/actools-python @googleapis/yoshi-python -*.yaml @googleapis/yoshi-python @googleapis/actools-python +* @googleapis/python-core-client-libraries diff --git a/.github/auto-label.yaml b/.github/auto-label.yaml index 41bff0b537..b2016d119b 100644 --- a/.github/auto-label.yaml +++ b/.github/auto-label.yaml @@ -1,4 +1,4 @@ -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/.github/workflows/tests.yaml b/.github/workflows/tests.yaml index 775861986c..1d464c51c9 100644 --- a/.github/workflows/tests.yaml +++ b/.github/workflows/tests.yaml @@ -22,7 +22,7 @@ jobs: # Don't upgrade python version; there's a bug in 3.10 sphinx runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Set up Python uses: actions/setup-python@v4 with: @@ -35,7 +35,7 @@ jobs: mypy: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Set up Python "3.11" uses: actions/setup-python@v4 with: @@ -51,7 +51,7 @@ jobs: target: [showcase, showcase_alternative_templates] runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Set up Python "3.11" uses: actions/setup-python@v4 with: @@ -90,7 +90,7 @@ jobs: max-parallel: 1 runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Setup temp directory run: | sudo mkdir -p /tmp/workspace/tests/cert/ @@ -135,7 +135,7 @@ jobs: variant: ['', _alternative_templates, _mixins, _alternative_templates_mixins] runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Set up Python "${{ matrix.python }}" uses: actions/setup-python@v4 with: @@ -160,7 +160,7 @@ jobs: showcase-unit-add-iam-methods: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Set up Python "3.11" uses: actions/setup-python@v4 with: @@ -188,7 +188,7 @@ jobs: matrix: variant: ['', _alternative_templates] steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Set up Python "3.11" uses: actions/setup-python@v4 with: @@ -213,7 +213,7 @@ jobs: snippetgen: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Set up Python "3.11" uses: actions/setup-python@v4 with: @@ -233,7 +233,7 @@ jobs: python: ["3.7", "3.8", "3.9", "3.10", "3.11"] runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Set up 
Python ${{ matrix.python }} uses: actions/setup-python@v4 with: @@ -255,7 +255,7 @@ jobs: variant: ['', _alternative_templates] runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Set up Python ${{ matrix.python }} uses: actions/setup-python@v4 with: @@ -274,7 +274,7 @@ jobs: runs-on: ubuntu-latest container: gcr.io/gapic-images/googleapis steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Cache Bazel files id: cache-bazel uses: actions/cache@v3 @@ -297,12 +297,12 @@ jobs: echo "and it will start over with a clean cache." echo "The old one will disappear after 7 days." - name: Integration Tests - run: bazel test //tests/integration/... + run: bazel test //tests/integration/... --test_output=errors goldens-lint: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Set up Python 3.11 uses: actions/setup-python@v4 with: @@ -321,7 +321,7 @@ jobs: style-check: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Set up Python "3.11" uses: actions/setup-python@v4 with: diff --git a/.kokoro/build.sh b/.kokoro/build.sh index a8340f3a58..ecf29aa67f 100755 --- a/.kokoro/build.sh +++ b/.kokoro/build.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2018 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/.kokoro/docker/docs/Dockerfile b/.kokoro/docker/docs/Dockerfile index f8137d0ae4..8e39a2cc43 100644 --- a/.kokoro/docker/docs/Dockerfile +++ b/.kokoro/docker/docs/Dockerfile @@ -1,4 +1,4 @@ -# Copyright 2020 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/.kokoro/populate-secrets.sh b/.kokoro/populate-secrets.sh index f52514257e..6f3972140e 100755 --- a/.kokoro/populate-secrets.sh +++ b/.kokoro/populate-secrets.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2020 Google LLC. +# Copyright 2023 Google LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/.kokoro/release.sh b/.kokoro/release.sh index 336f0eca1e..ea4f0153bf 100755 --- a/.kokoro/release.sh +++ b/.kokoro/release.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2020 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
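A note on the .kokoro/requirements.txt hunks that follow: the file is a hash-pinned lockfile, so bumping a single package (here certifi, cryptography, and pygments) replaces the whole block of per-artifact --hash=sha256: entries for that package, and pip's hash-checking mode refuses to install any archive whose digest matches none of the listed hashes. Lockfiles like this are regenerated rather than hand-edited; the autogenerated header of the repo-level requirements.txt later in this diff records the command used there:

    pip-compile --allow-unsafe --generate-hashes requirements.in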
diff --git a/.kokoro/requirements.txt b/.kokoro/requirements.txt index c7929db6d1..029bd342de 100644 --- a/.kokoro/requirements.txt +++ b/.kokoro/requirements.txt @@ -20,9 +20,9 @@ cachetools==5.2.0 \ --hash=sha256:6a94c6402995a99c3970cc7e4884bb60b4a8639938157eeed436098bf9831757 \ --hash=sha256:f9f17d2aec496a9aa6b76f53e3b614c965223c061982d434d160f930c698a9db # via google-auth -certifi==2022.12.7 \ - --hash=sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3 \ - --hash=sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18 +certifi==2023.7.22 \ + --hash=sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082 \ + --hash=sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9 # via requests cffi==1.15.1 \ --hash=sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5 \ @@ -113,26 +113,30 @@ commonmark==0.9.1 \ --hash=sha256:452f9dc859be7f06631ddcb328b6919c67984aca654e5fefb3914d54691aed60 \ --hash=sha256:da2f38c92590f83de410ba1a3cbceafbc74fee9def35f9251ba9a971d6d66fd9 # via rich -cryptography==41.0.0 \ - --hash=sha256:0ddaee209d1cf1f180f1efa338a68c4621154de0afaef92b89486f5f96047c55 \ - --hash=sha256:14754bcdae909d66ff24b7b5f166d69340ccc6cb15731670435efd5719294895 \ - --hash=sha256:344c6de9f8bda3c425b3a41b319522ba3208551b70c2ae00099c205f0d9fd3be \ - --hash=sha256:34d405ea69a8b34566ba3dfb0521379b210ea5d560fafedf9f800a9a94a41928 \ - --hash=sha256:3680248309d340fda9611498a5319b0193a8dbdb73586a1acf8109d06f25b92d \ - --hash=sha256:3c5ef25d060c80d6d9f7f9892e1d41bb1c79b78ce74805b8cb4aa373cb7d5ec8 \ - --hash=sha256:4ab14d567f7bbe7f1cdff1c53d5324ed4d3fc8bd17c481b395db224fb405c237 \ - --hash=sha256:5c1f7293c31ebc72163a9a0df246f890d65f66b4a40d9ec80081969ba8c78cc9 \ - --hash=sha256:6b71f64beeea341c9b4f963b48ee3b62d62d57ba93eb120e1196b31dc1025e78 \ - --hash=sha256:7d92f0248d38faa411d17f4107fc0bce0c42cae0b0ba5415505df72d751bf62d \ - --hash=sha256:8362565b3835ceacf4dc8f3b56471a2289cf51ac80946f9087e66dc283a810e0 \ - --hash=sha256:84a165379cb9d411d58ed739e4af3396e544eac190805a54ba2e0322feb55c46 \ - --hash=sha256:88ff107f211ea696455ea8d911389f6d2b276aabf3231bf72c8853d22db755c5 \ - --hash=sha256:9f65e842cb02550fac96536edb1d17f24c0a338fd84eaf582be25926e993dde4 \ - --hash=sha256:a4fc68d1c5b951cfb72dfd54702afdbbf0fb7acdc9b7dc4301bbf2225a27714d \ - --hash=sha256:b7f2f5c525a642cecad24ee8670443ba27ac1fab81bba4cc24c7b6b41f2d0c75 \ - --hash=sha256:b846d59a8d5a9ba87e2c3d757ca019fa576793e8758174d3868aecb88d6fc8eb \ - --hash=sha256:bf8fc66012ca857d62f6a347007e166ed59c0bc150cefa49f28376ebe7d992a2 \ - --hash=sha256:f5d0bf9b252f30a31664b6f64432b4730bb7038339bd18b1fafe129cfc2be9be +cryptography==41.0.3 \ + --hash=sha256:0d09fb5356f975974dbcb595ad2d178305e5050656affb7890a1583f5e02a306 \ + --hash=sha256:23c2d778cf829f7d0ae180600b17e9fceea3c2ef8b31a99e3c694cbbf3a24b84 \ + --hash=sha256:3fb248989b6363906827284cd20cca63bb1a757e0a2864d4c1682a985e3dca47 \ + --hash=sha256:41d7aa7cdfded09b3d73a47f429c298e80796c8e825ddfadc84c8a7f12df212d \ + --hash=sha256:42cb413e01a5d36da9929baa9d70ca90d90b969269e5a12d39c1e0d475010116 \ + --hash=sha256:4c2f0d35703d61002a2bbdcf15548ebb701cfdd83cdc12471d2bae80878a4207 \ + --hash=sha256:4fd871184321100fb400d759ad0cddddf284c4b696568204d281c902fc7b0d81 \ + --hash=sha256:5259cb659aa43005eb55a0e4ff2c825ca111a0da1814202c64d28a985d33b087 \ + --hash=sha256:57a51b89f954f216a81c9d057bf1a24e2f36e764a1ca9a501a6964eb4a6800dd \ + --hash=sha256:652627a055cb52a84f8c448185922241dd5217443ca194d5739b44612c5e6507 \ + 
--hash=sha256:67e120e9a577c64fe1f611e53b30b3e69744e5910ff3b6e97e935aeb96005858 \ + --hash=sha256:6af1c6387c531cd364b72c28daa29232162010d952ceb7e5ca8e2827526aceae \ + --hash=sha256:6d192741113ef5e30d89dcb5b956ef4e1578f304708701b8b73d38e3e1461f34 \ + --hash=sha256:7efe8041897fe7a50863e51b77789b657a133c75c3b094e51b5e4b5cec7bf906 \ + --hash=sha256:84537453d57f55a50a5b6835622ee405816999a7113267739a1b4581f83535bd \ + --hash=sha256:8f09daa483aedea50d249ef98ed500569841d6498aa9c9f4b0531b9964658922 \ + --hash=sha256:95dd7f261bb76948b52a5330ba5202b91a26fbac13ad0e9fc8a3ac04752058c7 \ + --hash=sha256:a74fbcdb2a0d46fe00504f571a2a540532f4c188e6ccf26f1f178480117b33c4 \ + --hash=sha256:a983e441a00a9d57a4d7c91b3116a37ae602907a7618b882c8013b5762e80574 \ + --hash=sha256:ab8de0d091acbf778f74286f4989cf3d1528336af1b59f3e5d2ebca8b5fe49e1 \ + --hash=sha256:aeb57c421b34af8f9fe830e1955bf493a86a7996cc1338fe41b30047d16e962c \ + --hash=sha256:ce785cf81a7bdade534297ef9e490ddff800d956625020ab2ec2780a556c313e \ + --hash=sha256:d0d651aa754ef58d75cec6edfbd21259d93810b73f6ec246436a21b7841908de # via # gcp-releasetool # secretstorage @@ -392,9 +396,9 @@ pycparser==2.21 \ --hash=sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9 \ --hash=sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206 # via cffi -pygments==2.13.0 \ - --hash=sha256:56a8508ae95f98e2b9bdf93a6be5ae3f7d8af858b43e02c5a2ff083726be40c1 \ - --hash=sha256:f643f331ab57ba3c9d89212ee4a2dabc6e94f117cf4eefde99a0574720d14c42 +pygments==2.15.0 \ + --hash=sha256:77a3299119af881904cd5ecd1ac6a66214b6e9bed1f2db16993b54adede64094 \ + --hash=sha256:f7e36cffc4c517fbc252861b9a6e4644ca0e5abadf9a113c72d1358ad09b9500 # via # readme-renderer # rich diff --git a/.kokoro/trampoline.sh b/.kokoro/trampoline.sh index f39236e943..d85b1f2676 100755 --- a/.kokoro/trampoline.sh +++ b/.kokoro/trampoline.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2017 Google Inc. +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/.kokoro/trampoline_v2.sh b/.kokoro/trampoline_v2.sh index 4af6cdc26d..59a7cf3a93 100755 --- a/.kokoro/trampoline_v2.sh +++ b/.kokoro/trampoline_v2.sh @@ -1,5 +1,5 @@ #!/usr/bin/env bash -# Copyright 2020 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/.trampolinerc b/.trampolinerc index 0eee72ab62..a7dfeb42c6 100644 --- a/.trampolinerc +++ b/.trampolinerc @@ -1,4 +1,4 @@ -# Copyright 2020 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -12,8 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. -# Template for .trampolinerc - # Add required env vars here. 
required_envvars+=(
)
diff --git a/BUILD.bazel b/BUILD.bazel
index 8d98b4264d..7dab7fa253 100644
--- a/BUILD.bazel
+++ b/BUILD.bazel
@@ -65,6 +65,5 @@ py_binary(
         requirement("pypandoc"),
         requirement("PyYAML"),
         requirement("grpc-google-iam-v1"),
-    ]
+    ],
 )
-
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 8a746b6f0d..7abf51515f 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,6 +1,17 @@
 # Changelog

+## [1.11.5](https://github.com/googleapis/gapic-generator-python/compare/v1.11.4...v1.11.5) (2023-09-06)
+
+
+### Bug Fixes
+
+* Fix docs build for generated clients ([#1715](https://github.com/googleapis/gapic-generator-python/issues/1715)) ([e4db994](https://github.com/googleapis/gapic-generator-python/commit/e4db9941078fe417e0d7b30bcd937e6c4dc0e6ba))
+* Fix docs build for numbered lists ([#1740](https://github.com/googleapis/gapic-generator-python/issues/1740)) ([19cc5b3](https://github.com/googleapis/gapic-generator-python/commit/19cc5b36348c1406d2c84fc65e44dbe45a2bdd1c))
+* Preserve new lines ([#1721](https://github.com/googleapis/gapic-generator-python/issues/1721)) ([baa136f](https://github.com/googleapis/gapic-generator-python/commit/baa136fd4fa94cfb6638c3074f10033dcc4f9da1))
+* Remove duplicate import statement for `google.longrunning.operations_pb2` ([#1726](https://github.com/googleapis/gapic-generator-python/issues/1726)) ([e3f08cd](https://github.com/googleapis/gapic-generator-python/commit/e3f08cd48bdf93e668be1b4b117190383ce2c022))
+* Resolve some Showcase test errors ([#1353](https://github.com/googleapis/gapic-generator-python/issues/1353)) ([4eee261](https://github.com/googleapis/gapic-generator-python/commit/4eee26181e8db9fb5144eef5a76f178c1594e48a))
+
 ## [1.11.4](https://github.com/googleapis/gapic-generator-python/compare/v1.11.3...v1.11.4) (2023-07-11)

diff --git a/README.rst b/README.rst
index d5888edc5d..ef24297730 100644
--- a/README.rst
+++ b/README.rst
@@ -1,3 +1,5 @@
+.. _codingstyle:
+
 API Client Generator for Python
 ===============================

@@ -15,12 +17,81 @@ to generate a client library.
 Purpose
 -------
-This library primarily exists to facilitate experimentation, particularly
-regarding:
+This library replaces the `monolithic generator`_
+with some improvements:

 - An explicit normalized format for specifying APIs.
 - Light weight, in-language code generators.

+.. _monolithic generator: https://github.com/googleapis/gapic-generator
+
+
+Bazel
+-------------
+This generator can be called from Bazel, which is a recommended way of using it inside a continuous integration build or any other automated pipeline.
+
+Clone the googleapis repository:
+
+.. code-block:: c
+
+    $ git clone https://github.com/googleapis/googleapis.git
+
+Create the targets
+------------------
+You need to add the following targets to your BUILD.bazel file.
+
+.. code-block:: c
+
+    load(
+        "@gapic_generator_python//rules_python_gapic:py_gapic.bzl",
+        "py_gapic_library"
+    )
+
+    load(
+        "@gapic_generator_python//rules_python_gapic:py_gapic_pkg.bzl",
+        "py_gapic_assembly_pkg"
+    )
+
+    py_gapic_library(
+        name = "documentai_py_gapic",
+        srcs = [":documentai_proto"],
+    )
+
+    py_gapic_assembly_pkg(
+        name = "documentai-v1beta2-py",
+        deps = [
+            ":documentai_py_gapic",
+        ],
+    )
+
+
+Compiling an API
+----------------
+
+Using Bazel:
+
+.. code-block:: c
+
+    bazel build //google/cloud/documentai/v1beta2:documentai-v1beta2-py
+
+Using Protoc:
+
+.. code-block:: c
+
+    # This is assumed to be in the `googleapis` project root.
+    $ protoc google/cloud/vision/v1/*.proto \
+        --python_gapic_out=/dest/
+
+Development
+-------------
+`Development`_
+
+.. _Development: https://github.com/googleapis/gapic-generator-python/blob/main/DEVELOPMENT.md
+
+Contributing
+-------------
+If you are looking to contribute to the project, please see `Contributing`_
+for guidelines.
+
+.. _Contributing: https://github.com/googleapis/gapic-generator-python/blob/main/CONTRIBUTING.md
+
 Documentation
 -------------

diff --git a/WORKSPACE b/WORKSPACE
index 3c08a04e7a..6748cf6cae 100644
--- a/WORKSPACE
+++ b/WORKSPACE
@@ -22,9 +22,9 @@ http_archive(
     ],
 )

-_rules_python_version = "0.9.0"
+_rules_python_version = "0.24.0"

-_rules_python_sha256 = "5fa3c738d33acca3b97622a13a741129f67ef43f5fdfcec63b29374cc0574c29"
+_rules_python_sha256 = "0a8003b044294d7840ac7d9d73eef05d6ceb682d7516781a4ec62eeb34702578"

 http_archive(
     name = "rules_python",
@@ -33,6 +33,19 @@ http_archive(
     url = "https://github.com/bazelbuild/rules_python/archive/{}.tar.gz".format(_rules_python_version),
 )

+load("@rules_python//python:repositories.bzl", "py_repositories")
+
+load("@rules_python//python:pip.bzl", "pip_parse")
+
+py_repositories()
+
+pip_parse(
+    name = "gapic_generator_python_pip_deps",
+    requirements_lock = "//:requirements.txt",
+)
+load("@gapic_generator_python_pip_deps//:requirements.bzl", "install_deps")
+
+install_deps()
 #
 # Import gapic-generator-python specific dependencies
 #
diff --git a/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 b/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2
index c5233c2703..6c98bed394 100644
--- a/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2
+++ b/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2
@@ -40,7 +40,7 @@ from google.iam.v1 import policy_pb2  # type: ignore
 from google.cloud.location import locations_pb2  # type: ignore
 {% endif %}
 {% if api.has_operations_mixin %}
-from google.longrunning import operations_pb2
+from google.longrunning import operations_pb2  # type: ignore
 {% endif %}
 {% endfilter %}
 from .transports.base import {{ service.name }}Transport, DEFAULT_CLIENT_INFO
diff --git a/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/base.py.j2 b/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/base.py.j2
index f22a5dbe03..9c3bb8fa63 100644
--- a/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/base.py.j2
+++ b/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/base.py.j2
@@ -18,9 +18,14 @@ from google.auth import credentials as ga_credentials  # type: ignore
 from google.oauth2 import service_account  # type: ignore

 {% filter sort_lines %}
+{% set import_ns = namespace(has_operations_mixin=false) %}
 {% for method in service.methods.values() %}
 {{ method.input.ident.python_import }}
+{% if method.output.ident|string() == "operations_pb2.Operation" %}
+{% set import_ns.has_operations_mixin = True %}
+{% else %}
 {{ method.output.ident.python_import }}
+{% endif %}
 {% endfor %}
 {% if opts.add_iam_methods or api.has_iam_mixin %}
 from google.iam.v1 import iam_policy_pb2  # type: ignore
@@ -30,7 +35,10 @@ from google.iam.v1 import policy_pb2  # type: ignore
 from google.cloud.location import locations_pb2  # type: ignore
 {% endif %}
 {% if api.has_operations_mixin %}
-from google.longrunning import operations_pb2
+{% set import_ns.has_operations_mixin = True %}
+{% endif %}
+{% if import_ns.has_operations_mixin %}
+from google.longrunning import operations_pb2  # type: ignore
 {% endif %}
 {% endfilter %}
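A note on the pattern this and the following template diffs repeat: in Jinja2, a {% set %} inside a {% for %} loop only rebinds a loop-local name, so the templates use a namespace() object (import_ns) whose attribute assignments survive the loop. The flag is raised when any method's output type is operations_pb2.Operation, or when the API has the operations mixin, and a single guarded import of google.longrunning.operations_pb2 is emitted afterwards, replacing the unconditional import that could previously appear twice.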
diff --git a/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/grpc.py.j2 b/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/grpc.py.j2
index 374e6ddbc2..9d4a91aa03 100644
--- a/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/grpc.py.j2
+++ b/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/grpc.py.j2
@@ -17,9 +17,14 @@ from google.auth.transport.grpc import SslCredentials  # type: ignore
 import grpc  # type: ignore

 {% filter sort_lines %}
+{% set import_ns = namespace(has_operations_mixin=false) %}
 {% for method in service.methods.values() %}
 {{ method.input.ident.python_import }}
+{% if method.output.ident|string() == "operations_pb2.Operation" %}
+{% set import_ns.has_operations_mixin = True %}
+{% else %}
 {{ method.output.ident.python_import }}
+{% endif %}
 {% endfor %}
 {% if opts.add_iam_methods or api.has_iam_mixin %}
 from google.iam.v1 import iam_policy_pb2  # type: ignore
@@ -29,7 +34,10 @@ from google.iam.v1 import policy_pb2  # type: ignore
 from google.cloud.location import locations_pb2  # type: ignore
 {% endif %}
 {% if api.has_operations_mixin %}
-from google.longrunning import operations_pb2
+{% set import_ns.has_operations_mixin = True %}
+{% endif %}
+{% if import_ns.has_operations_mixin %}
+from google.longrunning import operations_pb2  # type: ignore
 {% endif %}
 {% endfilter %}
 from .base import {{ service.name }}Transport, DEFAULT_CLIENT_INFO
diff --git a/gapic/ads-templates/docs/_static/custom.css b/gapic/ads-templates/docs/_static/custom.css.j2
similarity index 100%
rename from gapic/ads-templates/docs/_static/custom.css
rename to gapic/ads-templates/docs/_static/custom.css.j2
diff --git a/gapic/ads-templates/docs/conf.py.j2 b/gapic/ads-templates/docs/conf.py.j2
index 5b3946301d..80e398ddf7 100644
--- a/gapic/ads-templates/docs/conf.py.j2
+++ b/gapic/ads-templates/docs/conf.py.j2
@@ -85,7 +85,7 @@ version = ".".join(release.split(".")[0:2])
 #
 # This is also used if you do content translation via gettext catalogs.
 # Usually you set "language" from the command line for these cases.
-language = None +language = 'en' # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: diff --git a/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index 186e5b4e04..77974cf5ed 100644 --- a/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -44,13 +44,12 @@ from google.api_core import path_template from google.api_core import future from google.api_core import operation from google.api_core import operations_v1 -from google.longrunning import operations_pb2 {% endif %}{# lro #} {% if api.has_location_mixin %} from google.cloud.location import locations_pb2 {% endif %} -{% if api.has_operations_mixin %} -from google.longrunning import operations_pb2 +{% if api.has_operations_mixin or service.has_lro %} +from google.longrunning import operations_pb2 # type: ignore {% endif %} from google.api_core import gapic_v1 {% for method in service.methods.values() %} diff --git a/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 b/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 index c2fd3d57d4..5abd583cac 100644 --- a/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 +++ b/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 @@ -40,7 +40,7 @@ from google.iam.v1 import policy_pb2 # type: ignore from google.cloud.location import locations_pb2 # type: ignore {% endif %} {% if api.has_operations_mixin %} -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore {% endif %} {% endfilter %} from .transports.base import {{ service.name }}Transport, DEFAULT_CLIENT_INFO diff --git a/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 b/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 index bf6e34da7b..03cc953dbe 100644 --- a/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 +++ b/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 @@ -50,7 +50,7 @@ from google.iam.v1 import policy_pb2 # type: ignore from google.cloud.location import locations_pb2 # type: ignore {% endif %} {% if api.has_operations_mixin %} -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore {% endif %} {% endfilter %} from .transports.base import {{ service.name }}Transport, DEFAULT_CLIENT_INFO diff --git a/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 b/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 index 1043178b50..b2469654df 100644 --- a/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 +++ b/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 @@ -20,9 +20,14 @@ from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore {% filter sort_lines %} +{% set import_ns = namespace(has_operations_mixin=false) %} {% for method in service.methods.values() %} {{ method.input.ident.python_import }} +{% if method.output.ident|string() == "operations_pb2.Operation" %} +{% set import_ns.has_operations_mixin = True %} 
+{% else %} {{ method.output.ident.python_import }} +{% endif %} {% endfor %} {% if opts.add_iam_methods or api.has_iam_mixin %} from google.iam.v1 import iam_policy_pb2 # type: ignore @@ -32,7 +37,10 @@ from google.iam.v1 import policy_pb2 # type: ignore from google.cloud.location import locations_pb2 # type: ignore {% endif %} {% if api.has_operations_mixin %} -from google.longrunning import operations_pb2 +{% set import_ns.has_operations_mixin = True %} +{% endif %} +{% if import_ns.has_operations_mixin %} +from google.longrunning import operations_pb2 # type: ignore {% endif %} {% endfilter %} {% filter sort_lines %} diff --git a/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 b/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 index 96923c5dbd..2e4f26a8a7 100644 --- a/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 +++ b/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 @@ -17,9 +17,14 @@ from google.auth.transport.grpc import SslCredentials # type: ignore import grpc # type: ignore {% filter sort_lines %} +{% set import_ns = namespace(has_operations_mixin=false) %} {% for method in service.methods.values() %} {{ method.input.ident.python_import }} +{% if method.output.ident|string() == "operations_pb2.Operation" %} +{% set import_ns.has_operations_mixin = True %} +{% else %} {{ method.output.ident.python_import }} +{% endif %} {% endfor %} {% if opts.add_iam_methods or api.has_iam_mixin %} from google.iam.v1 import iam_policy_pb2 # type: ignore @@ -29,7 +34,10 @@ from google.iam.v1 import policy_pb2 # type: ignore from google.cloud.location import locations_pb2 # type: ignore {% endif %} {% if api.has_operations_mixin %} -from google.longrunning import operations_pb2 +{% set import_ns.has_operations_mixin = True %} +{% endif %} +{% if import_ns.has_operations_mixin %} +from google.longrunning import operations_pb2 # type: ignore {% endif %} {% endfilter %} from .base import {{ service.name }}Transport, DEFAULT_CLIENT_INFO diff --git a/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 b/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 index 25b7b4db5c..94e2952fcc 100644 --- a/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 +++ b/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 @@ -17,9 +17,14 @@ import grpc # type: ignore from grpc.experimental import aio # type: ignore {% filter sort_lines %} +{% set import_ns = namespace(has_operations_mixin=false) %} {% for method in service.methods.values() %} {{ method.input.ident.python_import }} +{% if method.output.ident|string() == "operations_pb2.Operation" %} +{% set import_ns.has_operations_mixin = True %} +{% else %} {{ method.output.ident.python_import }} +{% endif %} {% endfor %} {% if opts.add_iam_methods or api.has_iam_mixin %} from google.iam.v1 import iam_policy_pb2 # type: ignore @@ -29,7 +34,10 @@ from google.iam.v1 import policy_pb2 # type: ignore from google.cloud.location import locations_pb2 # type: ignore {% endif %} {% if api.has_operations_mixin %} -from google.longrunning import operations_pb2 +{% set import_ns.has_operations_mixin = True %} +{% endif %} +{% if import_ns.has_operations_mixin %} +from google.longrunning import operations_pb2 # type: ignore {% endif %} {% endfilter %} from .base 
import {{ service.name }}Transport, DEFAULT_CLIENT_INFO diff --git a/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 b/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 index bf9b5b36e2..52493d180e 100644 --- a/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 +++ b/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 @@ -26,8 +26,9 @@ from google.iam.v1 import policy_pb2 # type: ignore {% if api.has_location_mixin %} from google.cloud.location import locations_pb2 # type: ignore {% endif %} +{% set import_ns = namespace(has_operations_mixin=false) %} {% if api.has_operations_mixin %} -from google.longrunning import operations_pb2 +{% set import_ns.has_operations_mixin = True %} {% endif %} from requests import __version__ as requests_version import dataclasses @@ -45,13 +46,20 @@ except AttributeError: # pragma: NO COVER {% filter sort_lines %} {% for method in service.methods.values() %} {{method.input.ident.python_import}} +{% if method.output.ident|string() == "operations_pb2.Operation" %} +{% set import_ns.has_operations_mixin = True %} +{% else %} {{method.output.ident.python_import}} +{% endif %} {% endfor %} {% if opts.add_iam_methods %} from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore {% endif %} {% endfilter %} +{% if import_ns.has_operations_mixin %} +from google.longrunning import operations_pb2 # type: ignore +{% endif %} from .base import {{service.name}}Transport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO diff --git a/gapic/templates/docs/_static/custom.css b/gapic/templates/docs/_static/custom.css.j2 similarity index 100% rename from gapic/templates/docs/_static/custom.css rename to gapic/templates/docs/_static/custom.css.j2 diff --git a/gapic/templates/docs/conf.py.j2 b/gapic/templates/docs/conf.py.j2 index 5c7c4b9998..b1eb707e8c 100644 --- a/gapic/templates/docs/conf.py.j2 +++ b/gapic/templates/docs/conf.py.j2 @@ -85,7 +85,7 @@ version = ".".join(release.split(".")[0:2]) # # This is also used if you do content translation via gettext catalogs. # Usually you set "language" from the command line for these cases. 
-language = None
+language = 'en'

 # There are two options for replacing |today|: either, you set today to some
 # non-false value, then it is used:
diff --git a/gapic/templates/noxfile.py.j2 b/gapic/templates/noxfile.py.j2
index eec54b8ade..c592f65146 100644
--- a/gapic/templates/noxfile.py.j2
+++ b/gapic/templates/noxfile.py.j2
@@ -127,7 +127,7 @@ def docs(session):
     """Build the docs for this library."""

     session.install("-e", ".")
-    session.install("sphinx==4.0.1", "alabaster", "recommonmark")
+    session.install("sphinx==7.0.1", "alabaster", "recommonmark")

     shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True)
     session.run(
diff --git a/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2
index 68508abc92..ceb92c806b 100644
--- a/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2
+++ b/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2
@@ -52,13 +52,12 @@ from google.api_core import future
 {% if service.has_lro %}
 from google.api_core import operation
 from google.api_core import operations_v1
-from google.longrunning import operations_pb2
 {% endif %}{# lro #}
 {% if api.has_location_mixin %}
 from google.cloud.location import locations_pb2
 {% endif %}
-{% if api.has_operations_mixin %}
-from google.longrunning import operations_pb2
+{% if api.has_operations_mixin or service.has_lro %}
+from google.longrunning import operations_pb2  # type: ignore
 {% endif %}
 from google.api_core import gapic_v1
 {% for method in service.methods.values() %}
diff --git a/gapic/utils/lines.py b/gapic/utils/lines.py
index 2c5cd4021c..fb24e19351 100644
--- a/gapic/utils/lines.py
+++ b/gapic/utils/lines.py
@@ -17,6 +17,9 @@
 from typing import Iterable, Optional


+NUMBERED_LIST_REGEX = r"^\d+\. "
+
+
 def sort_lines(text: str, dedupe: bool = True) -> str:
     """Sort the individual lines of a block of text.

@@ -40,6 +43,49 @@
     return f'{leading}{answer}{trailing}'


+def get_subsequent_line_indentation_level(list_item: str) -> int:
+    """
+    Given a list item return the indentation level for subsequent lines.
+    For example, if it is a numbered list, the indentation level should be 4
+    as shown below.
+
+    Here subsequent lines should be indented by 2
+
+    - The quick brown fox jumps over the lazy dog. The quick brown fox jumps
+      over the lazy dog
+
+    Here subsequent lines should be indented by 2
+
+    + The quick brown fox jumps over the lazy dog. The quick brown fox jumps
+      over the lazy dog
+
+    Here subsequent lines should be indented by 4 to cater for double digits
+
+    1. The quick brown fox jumps over the lazy dog. The quick brown fox jumps
+       over the lazy dog
+
+    22. The quick brown fox jumps over the lazy dog. The quick brown fox jumps
+        over the lazy dog
+    """
+    if len(list_item) >= 2 and list_item[0:2] in ['- ', '+ ']:
+        indentation_level = 2
+    elif len(list_item) >= 4 and re.match(NUMBERED_LIST_REGEX, list_item):
+        indentation_level = 4
+    else:
+        # Don't use any indentation level if the list item marker is not known
+        indentation_level = 0
+    return indentation_level
+
+
+def is_list_item(list_item: str) -> bool:
+    """
+    Given a string return a boolean indicating whether the string is a list item.
+    """
+    if len(list_item) < 3:
+        return False
+    return list_item.startswith('- ') or list_item.startswith('+ ') or bool(re.match(NUMBERED_LIST_REGEX, list_item))
+
+
 def wrap(text: str, width: int, *, offset: Optional[int] = None, indent: int = 0) -> str:
     """Wrap the given string to the given width.

@@ -93,11 +139,12 @@
             break_on_hyphens=False,
         )
     # Strip the first \n from the text so it is not misidentified as an
-    # intentionally short line below, except when the text contains `:`
-    # as the new line is required for lists.
+    # intentionally short line below, except when the text contains a list,
+    # as the new line is required for lists. Look for a list item marker in
+    # the remaining text which indicates that a list is present.
     if '\n' in text:
-        initial_text = text.split('\n')[0]
-        if ":" not in initial_text:
+        remaining_text = "".join(text.split('\n')[1:])
+        if not is_list_item(remaining_text.strip()):
             text = text.replace('\n', ' ', 1)

     # Save the new `first` line.
@@ -107,17 +154,23 @@
     # the sphinx docs build will fail.
     text = re.sub(r':\n([^\n])', r':\n\n\1', text)

-    text = text[len(first):].strip()
+    text = text[len(first):]
     if not text:
         return first.strip()

+    # Strip leading and trailing whitespace.
+    # Preserve a new line at the beginning.
+    new_line = '\n' if text[0] == '\n' else ''
+    text = new_line + text.strip()
+
     # Tokenize the rest of the text to try to preserve line breaks
     # that semantically matter.
     tokens = []
     token = ''
     for line in text.split('\n'):
-        # Ensure that lines that start with a hyphen are always on a new line
-        if line.strip().startswith('-') and token:
+        # Ensure that lines that start with a list item marker are always on
+        # a new line, and that blank lines are preserved.
+        if (is_list_item(line.strip()) or not len(line)) and token:
             tokens.append(token)
             token = ''
         token += line + '\n'
@@ -139,7 +192,7 @@
         initial_indent=' ' * indent,
         # ensure that subsequent lines for lists are indented 2 spaces
         subsequent_indent=' ' * indent + \
-            (' ' if token.strip().startswith('-') else ''),
+            ' ' * get_subsequent_line_indentation_level(token.strip()),
         text=token,
         width=width,
         break_on_hyphens=False,
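The lines.py change above is the heart of the "preserve new lines" and "numbered lists" fixes. A minimal sketch of the resulting behavior, assuming the helpers are imported from gapic.utils.lines exactly as defined in the hunk (the sample text and width are illustrative only):

    # Sketch only: exercises the new list-aware helpers from the diff above.
    from gapic.utils.lines import (
        get_subsequent_line_indentation_level,
        is_list_item,
        wrap,
    )

    # Bulleted items hang by 2 spaces; numbered items by 4 (room for "22. ").
    assert get_subsequent_line_indentation_level('- item') == 2
    assert get_subsequent_line_indentation_level('22. item') == 4
    assert is_list_item('+ item') and not is_list_item('plain text')

    # The first newline is no longer collapsed when a list follows it, so the
    # numbered item below stays on its own line and wraps with a hanging indent.
    text = 'Steps:\n1. The quick brown fox jumps over the lazy dog, twice over.'
    print(wrap(text, width=40))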
+ """ + if len(list_item) < 3: + return False + return list_item.startswith('- ') or list_item.startswith('+ ') or bool(re.match(NUMBERED_LIST_REGEX, list_item)) + + def wrap(text: str, width: int, *, offset: Optional[int] = None, indent: int = 0) -> str: """Wrap the given string to the given width. @@ -93,11 +139,12 @@ def wrap(text: str, width: int, *, offset: Optional[int] = None, indent: int = 0 break_on_hyphens=False, ) # Strip the first \n from the text so it is not misidentified as an - # intentionally short line below, except when the text contains `:` - # as the new line is required for lists. + # intentionally short line below, except when the text contains a list, + # as the new line is required for lists. Look for a list item marker in + # the remaining text which indicates that a list is present. if '\n' in text: - initial_text = text.split('\n')[0] - if ":" not in initial_text: + remaining_text = "".join(text.split('\n')[1:]) + if not is_list_item(remaining_text.strip()): text = text.replace('\n', ' ', 1) # Save the new `first` line. @@ -107,17 +154,23 @@ def wrap(text: str, width: int, *, offset: Optional[int] = None, indent: int = 0 # the sphinx docs build will fail. text = re.sub(r':\n([^\n])', r':\n\n\1', text) - text = text[len(first):].strip() + text = text[len(first):] if not text: return first.strip() + # Strip leading and ending whitespace. + # Preserve new line at the beginning. + new_line = '\n' if text[0] == '\n' else '' + text = new_line + text.strip() + # Tokenize the rest of the text to try to preserve line breaks # that semantically matter. tokens = [] token = '' for line in text.split('\n'): - # Ensure that lines that start with a hyphen are always on a new line - if line.strip().startswith('-') and token: + # Ensure that lines that start with a list item marker are always on a new line + # Ensure that blank lines are preserved + if (is_list_item(line.strip()) or not len(line)) and token: tokens.append(token) token = '' token += line + '\n' @@ -139,7 +192,7 @@ def wrap(text: str, width: int, *, offset: Optional[int] = None, indent: int = 0 initial_indent=' ' * indent, # ensure that subsequent lines for lists are indented 2 spaces subsequent_indent=' ' * indent + \ - (' ' if token.strip().startswith('-') else ''), + ' ' * get_subsequent_line_indentation_level(token.strip()), text=token, width=width, break_on_hyphens=False, diff --git a/repositories.bzl b/repositories.bzl index 772ad5400e..b79bc62f98 100644 --- a/repositories.bzl +++ b/repositories.bzl @@ -1,5 +1,4 @@ load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive") -load("@rules_python//python:pip.bzl", "pip_install") _PANDOC_BUILD_FILE = """ filegroup( @@ -9,11 +8,6 @@ filegroup( )""" def gapic_generator_python(): - _maybe( - pip_install, - name = "gapic_generator_python_pip_deps", - requirements = "@gapic_generator_python//:requirements.txt", - ) _protobuf_version = "3.21.12" _protobuf_sha256 = "930c2c3b5ecc6c9c12615cf5ad93f1cd6e12d0aba862b572e076259970ac3a53" @@ -66,12 +60,12 @@ def gapic_generator_python(): strip_prefix = "rules_gapic-%s" % _rules_gapic_version, urls = ["https://github.com/googleapis/rules_gapic/archive/v%s.tar.gz" % _rules_gapic_version], ) - + _commit_sha = "fae3e6e091418d6343902debaf545cfc8f32c3ff" _maybe( http_archive, name = "com_google_googleapis", - strip_prefix = "googleapis-ffc531383747ebb702dad3db237ef5fdea796363", - urls = ["https://github.com/googleapis/googleapis/archive/ffc531383747ebb702dad3db237ef5fdea796363.zip"], + strip_prefix = 
"googleapis-{}".format(_commit_sha), + urls = ["https://github.com/googleapis/googleapis/archive/{}.zip".format(_commit_sha)], ) def gapic_generator_register_toolchains(): diff --git a/requirements.in b/requirements.in new file mode 100644 index 0000000000..8dc2b43cf3 --- /dev/null +++ b/requirements.in @@ -0,0 +1,14 @@ +click +google-api-core +googleapis-common-protos +jinja2 +MarkupSafe +protobuf +pypandoc +PyYAML +setuptools +grpc-google-iam-v1 +proto-plus +pytest-asyncio +libcst +inflection diff --git a/requirements.txt b/requirements.txt index ebc2313f4f..933f5c2162 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,14 +1,409 @@ -click==8.1.3 -google-api-core==2.11.1 -googleapis-common-protos==1.59.1 -jinja2==3.1.2 -MarkupSafe==2.1.3 -protobuf==4.23.4 -pypandoc==1.11 -PyYAML==6.0 -setuptools==68.0.0 -grpc-google-iam-v1==0.12.6 -proto-plus==1.22.3 -pytest-asyncio==0.21.0 -libcst==1.0.1 -inflection==0.5.1 \ No newline at end of file +# +# This file is autogenerated by pip-compile with Python 3.9 +# by the following command: +# +# pip-compile --allow-unsafe --generate-hashes requirements.in +# +cachetools==5.3.1 \ + --hash=sha256:95ef631eeaea14ba2e36f06437f36463aac3a096799e876ee55e5cdccb102590 \ + --hash=sha256:dce83f2d9b4e1f732a8cd44af8e8fab2dbe46201467fc98b3ef8f269092bf62b + # via google-auth +certifi==2023.7.22 \ + --hash=sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082 \ + --hash=sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9 + # via requests +charset-normalizer==3.2.0 \ + --hash=sha256:04e57ab9fbf9607b77f7d057974694b4f6b142da9ed4a199859d9d4d5c63fe96 \ + --hash=sha256:09393e1b2a9461950b1c9a45d5fd251dc7c6f228acab64da1c9c0165d9c7765c \ + --hash=sha256:0b87549028f680ca955556e3bd57013ab47474c3124dc069faa0b6545b6c9710 \ + --hash=sha256:1000fba1057b92a65daec275aec30586c3de2401ccdcd41f8a5c1e2c87078706 \ + --hash=sha256:1249cbbf3d3b04902ff081ffbb33ce3377fa6e4c7356f759f3cd076cc138d020 \ + --hash=sha256:1920d4ff15ce893210c1f0c0e9d19bfbecb7983c76b33f046c13a8ffbd570252 \ + --hash=sha256:193cbc708ea3aca45e7221ae58f0fd63f933753a9bfb498a3b474878f12caaad \ + --hash=sha256:1a100c6d595a7f316f1b6f01d20815d916e75ff98c27a01ae817439ea7726329 \ + --hash=sha256:1f30b48dd7fa1474554b0b0f3fdfdd4c13b5c737a3c6284d3cdc424ec0ffff3a \ + --hash=sha256:203f0c8871d5a7987be20c72442488a0b8cfd0f43b7973771640fc593f56321f \ + --hash=sha256:246de67b99b6851627d945db38147d1b209a899311b1305dd84916f2b88526c6 \ + --hash=sha256:2dee8e57f052ef5353cf608e0b4c871aee320dd1b87d351c28764fc0ca55f9f4 \ + --hash=sha256:2efb1bd13885392adfda4614c33d3b68dee4921fd0ac1d3988f8cbb7d589e72a \ + --hash=sha256:2f4ac36d8e2b4cc1aa71df3dd84ff8efbe3bfb97ac41242fbcfc053c67434f46 \ + --hash=sha256:3170c9399da12c9dc66366e9d14da8bf7147e1e9d9ea566067bbce7bb74bd9c2 \ + --hash=sha256:3b1613dd5aee995ec6d4c69f00378bbd07614702a315a2cf6c1d21461fe17c23 \ + --hash=sha256:3bb3d25a8e6c0aedd251753a79ae98a093c7e7b471faa3aa9a93a81431987ace \ + --hash=sha256:3bb7fda7260735efe66d5107fb7e6af6a7c04c7fce9b2514e04b7a74b06bf5dd \ + --hash=sha256:41b25eaa7d15909cf3ac4c96088c1f266a9a93ec44f87f1d13d4a0e86c81b982 \ + --hash=sha256:45de3f87179c1823e6d9e32156fb14c1927fcc9aba21433f088fdfb555b77c10 \ + --hash=sha256:46fb8c61d794b78ec7134a715a3e564aafc8f6b5e338417cb19fe9f57a5a9bf2 \ + --hash=sha256:48021783bdf96e3d6de03a6e39a1171ed5bd7e8bb93fc84cc649d11490f87cea \ + --hash=sha256:4957669ef390f0e6719db3613ab3a7631e68424604a7b448f079bee145da6e09 \ + --hash=sha256:5e86d77b090dbddbe78867a0275cb4df08ea195e660f1f7f13435a4649e954e5 \ + 
--hash=sha256:6339d047dab2780cc6220f46306628e04d9750f02f983ddb37439ca47ced7149 \ + --hash=sha256:681eb3d7e02e3c3655d1b16059fbfb605ac464c834a0c629048a30fad2b27489 \ + --hash=sha256:6c409c0deba34f147f77efaa67b8e4bb83d2f11c8806405f76397ae5b8c0d1c9 \ + --hash=sha256:7095f6fbfaa55defb6b733cfeb14efaae7a29f0b59d8cf213be4e7ca0b857b80 \ + --hash=sha256:70c610f6cbe4b9fce272c407dd9d07e33e6bf7b4aa1b7ffb6f6ded8e634e3592 \ + --hash=sha256:72814c01533f51d68702802d74f77ea026b5ec52793c791e2da806a3844a46c3 \ + --hash=sha256:7a4826ad2bd6b07ca615c74ab91f32f6c96d08f6fcc3902ceeedaec8cdc3bcd6 \ + --hash=sha256:7c70087bfee18a42b4040bb9ec1ca15a08242cf5867c58726530bdf3945672ed \ + --hash=sha256:855eafa5d5a2034b4621c74925d89c5efef61418570e5ef9b37717d9c796419c \ + --hash=sha256:8700f06d0ce6f128de3ccdbc1acaea1ee264d2caa9ca05daaf492fde7c2a7200 \ + --hash=sha256:89f1b185a01fe560bc8ae5f619e924407efca2191b56ce749ec84982fc59a32a \ + --hash=sha256:8b2c760cfc7042b27ebdb4a43a4453bd829a5742503599144d54a032c5dc7e9e \ + --hash=sha256:8c2f5e83493748286002f9369f3e6607c565a6a90425a3a1fef5ae32a36d749d \ + --hash=sha256:8e098148dd37b4ce3baca71fb394c81dc5d9c7728c95df695d2dca218edf40e6 \ + --hash=sha256:94aea8eff76ee6d1cdacb07dd2123a68283cb5569e0250feab1240058f53b623 \ + --hash=sha256:95eb302ff792e12aba9a8b8f8474ab229a83c103d74a750ec0bd1c1eea32e669 \ + --hash=sha256:9bd9b3b31adcb054116447ea22caa61a285d92e94d710aa5ec97992ff5eb7cf3 \ + --hash=sha256:9e608aafdb55eb9f255034709e20d5a83b6d60c054df0802fa9c9883d0a937aa \ + --hash=sha256:a103b3a7069b62f5d4890ae1b8f0597618f628b286b03d4bc9195230b154bfa9 \ + --hash=sha256:a386ebe437176aab38c041de1260cd3ea459c6ce5263594399880bbc398225b2 \ + --hash=sha256:a38856a971c602f98472050165cea2cdc97709240373041b69030be15047691f \ + --hash=sha256:a401b4598e5d3f4a9a811f3daf42ee2291790c7f9d74b18d75d6e21dda98a1a1 \ + --hash=sha256:a7647ebdfb9682b7bb97e2a5e7cb6ae735b1c25008a70b906aecca294ee96cf4 \ + --hash=sha256:aaf63899c94de41fe3cf934601b0f7ccb6b428c6e4eeb80da72c58eab077b19a \ + --hash=sha256:b0dac0ff919ba34d4df1b6131f59ce95b08b9065233446be7e459f95554c0dc8 \ + --hash=sha256:baacc6aee0b2ef6f3d308e197b5d7a81c0e70b06beae1f1fcacffdbd124fe0e3 \ + --hash=sha256:bf420121d4c8dce6b889f0e8e4ec0ca34b7f40186203f06a946fa0276ba54029 \ + --hash=sha256:c04a46716adde8d927adb9457bbe39cf473e1e2c2f5d0a16ceb837e5d841ad4f \ + --hash=sha256:c0b21078a4b56965e2b12f247467b234734491897e99c1d51cee628da9786959 \ + --hash=sha256:c1c76a1743432b4b60ab3358c937a3fe1341c828ae6194108a94c69028247f22 \ + --hash=sha256:c4983bf937209c57240cff65906b18bb35e64ae872da6a0db937d7b4af845dd7 \ + --hash=sha256:c4fb39a81950ec280984b3a44f5bd12819953dc5fa3a7e6fa7a80db5ee853952 \ + --hash=sha256:c57921cda3a80d0f2b8aec7e25c8aa14479ea92b5b51b6876d975d925a2ea346 \ + --hash=sha256:c8063cf17b19661471ecbdb3df1c84f24ad2e389e326ccaf89e3fb2484d8dd7e \ + --hash=sha256:ccd16eb18a849fd8dcb23e23380e2f0a354e8daa0c984b8a732d9cfaba3a776d \ + --hash=sha256:cd6dbe0238f7743d0efe563ab46294f54f9bc8f4b9bcf57c3c666cc5bc9d1299 \ + --hash=sha256:d62e51710986674142526ab9f78663ca2b0726066ae26b78b22e0f5e571238dd \ + --hash=sha256:db901e2ac34c931d73054d9797383d0f8009991e723dab15109740a63e7f902a \ + --hash=sha256:e03b8895a6990c9ab2cdcd0f2fe44088ca1c65ae592b8f795c3294af00a461c3 \ + --hash=sha256:e1c8a2f4c69e08e89632defbfabec2feb8a8d99edc9f89ce33c4b9e36ab63037 \ + --hash=sha256:e4b749b9cc6ee664a3300bb3a273c1ca8068c46be705b6c31cf5d276f8628a94 \ + --hash=sha256:e6a5bf2cba5ae1bb80b154ed68a3cfa2fa00fde979a7f50d6598d3e17d9ac20c \ + 
--hash=sha256:e857a2232ba53ae940d3456f7533ce6ca98b81917d47adc3c7fd55dad8fab858 \ + --hash=sha256:ee4006268ed33370957f55bf2e6f4d263eaf4dc3cfc473d1d90baff6ed36ce4a \ + --hash=sha256:eef9df1eefada2c09a5e7a40991b9fc6ac6ef20b1372abd48d2794a316dc0449 \ + --hash=sha256:f058f6963fd82eb143c692cecdc89e075fa0828db2e5b291070485390b2f1c9c \ + --hash=sha256:f25c229a6ba38a35ae6e25ca1264621cc25d4d38dca2942a7fce0b67a4efe918 \ + --hash=sha256:f2a1d0fd4242bd8643ce6f98927cf9c04540af6efa92323e9d3124f57727bfc1 \ + --hash=sha256:f7560358a6811e52e9c4d142d497f1a6e10103d3a6881f18d04dbce3729c0e2c \ + --hash=sha256:f779d3ad205f108d14e99bb3859aa7dd8e9c68874617c72354d7ecaec2a054ac \ + --hash=sha256:f87f746ee241d30d6ed93969de31e5ffd09a2961a051e60ae6bddde9ec3583aa + # via requests +click==8.1.7 \ + --hash=sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28 \ + --hash=sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de + # via -r requirements.in +exceptiongroup==1.1.3 \ + --hash=sha256:097acd85d473d75af5bb98e41b61ff7fe35efe6675e4f9370ec6ec5126d160e9 \ + --hash=sha256:343280667a4585d195ca1cf9cef84a4e178c4b6cf2274caef9859782b567d5e3 + # via pytest +google-api-core==2.11.1 \ + --hash=sha256:25d29e05a0058ed5f19c61c0a78b1b53adea4d9364b464d014fbda941f6d1c9a \ + --hash=sha256:d92a5a92dc36dd4f4b9ee4e55528a90e432b059f93aee6ad857f9de8cc7ae94a + # via -r requirements.in +google-auth==2.22.0 \ + --hash=sha256:164cba9af4e6e4e40c3a4f90a1a6c12ee56f14c0b4868d1ca91b32826ab334ce \ + --hash=sha256:d61d1b40897407b574da67da1a833bdc10d5a11642566e506565d1b1a46ba873 + # via google-api-core +googleapis-common-protos[grpc]==1.60.0 \ + --hash=sha256:69f9bbcc6acde92cab2db95ce30a70bd2b81d20b12eff3f1aabaffcbe8a93918 \ + --hash=sha256:e73ebb404098db405ba95d1e1ae0aa91c3e15a71da031a2eeb6b2e23e7bc3708 + # via + # -r requirements.in + # google-api-core + # grpc-google-iam-v1 +grpc-google-iam-v1==0.12.6 \ + --hash=sha256:2bc4b8fdf22115a65d751c9317329322602c39b7c86a289c9b72d228d960ef5f \ + --hash=sha256:5c10f3d8dc2d88678ab1a9b0cb5482735c5efee71e6c0cd59f872eef22913f5c + # via -r requirements.in +grpcio==1.57.0 \ + --hash=sha256:00258cbe3f5188629828363ae8ff78477ce976a6f63fb2bb5e90088396faa82e \ + --hash=sha256:092fa155b945015754bdf988be47793c377b52b88d546e45c6a9f9579ac7f7b6 \ + --hash=sha256:0f80bf37f09e1caba6a8063e56e2b87fa335add314cf2b78ebf7cb45aa7e3d06 \ + --hash=sha256:20ec6fc4ad47d1b6e12deec5045ec3cd5402d9a1597f738263e98f490fe07056 \ + --hash=sha256:2313b124e475aa9017a9844bdc5eafb2d5abdda9d456af16fc4535408c7d6da6 \ + --hash=sha256:23e7d8849a0e58b806253fd206ac105b328171e01b8f18c7d5922274958cc87e \ + --hash=sha256:2f708a6a17868ad8bf586598bee69abded4996b18adf26fd2d91191383b79019 \ + --hash=sha256:2f7349786da979a94690cc5c2b804cab4e8774a3cf59be40d037c4342c906649 \ + --hash=sha256:34950353539e7d93f61c6796a007c705d663f3be41166358e3d88c45760c7d98 \ + --hash=sha256:40b72effd4c789de94ce1be2b5f88d7b9b5f7379fe9645f198854112a6567d9a \ + --hash=sha256:4b089f7ad1eb00a104078bab8015b0ed0ebcb3b589e527ab009c53893fd4e613 \ + --hash=sha256:4faea2cfdf762a664ab90589b66f416274887641ae17817de510b8178356bf73 \ + --hash=sha256:5371bcd861e679d63b8274f73ac281751d34bd54eccdbfcd6aa00e692a82cd7b \ + --hash=sha256:5613a2fecc82f95d6c51d15b9a72705553aa0d7c932fad7aed7afb51dc982ee5 \ + --hash=sha256:57b183e8b252825c4dd29114d6c13559be95387aafc10a7be645462a0fc98bbb \ + --hash=sha256:5b7a4ce8f862fe32b2a10b57752cf3169f5fe2915acfe7e6a1e155db3da99e79 \ + --hash=sha256:5e5b58e32ae14658085c16986d11e99abd002ddbf51c8daae8a0671fffb3467f \ + 
--hash=sha256:60fe15288a0a65d5c1cb5b4a62b1850d07336e3ba728257a810317be14f0c527 \ + --hash=sha256:6907b1cf8bb29b058081d2aad677b15757a44ef2d4d8d9130271d2ad5e33efca \ + --hash=sha256:76c44efa4ede1f42a9d5b2fed1fe9377e73a109bef8675fb0728eb80b0b8e8f2 \ + --hash=sha256:7a635589201b18510ff988161b7b573f50c6a48fae9cb567657920ca82022b37 \ + --hash=sha256:7b400807fa749a9eb286e2cd893e501b110b4d356a218426cb9c825a0474ca56 \ + --hash=sha256:82640e57fb86ea1d71ea9ab54f7e942502cf98a429a200b2e743d8672171734f \ + --hash=sha256:871f9999e0211f9551f368612460442a5436d9444606184652117d6a688c9f51 \ + --hash=sha256:9338bacf172e942e62e5889b6364e56657fbf8ac68062e8b25c48843e7b202bb \ + --hash=sha256:a8a8e560e8dbbdf29288872e91efd22af71e88b0e5736b0daf7773c1fecd99f0 \ + --hash=sha256:aed90d93b731929e742967e236f842a4a2174dc5db077c8f9ad2c5996f89f63e \ + --hash=sha256:b363bbb5253e5f9c23d8a0a034dfdf1b7c9e7f12e602fc788c435171e96daccc \ + --hash=sha256:b4098b6b638d9e0ca839a81656a2fd4bc26c9486ea707e8b1437d6f9d61c3941 \ + --hash=sha256:b53333627283e7241fcc217323f225c37783b5f0472316edcaa4479a213abfa6 \ + --hash=sha256:b670c2faa92124b7397b42303e4d8eb64a4cd0b7a77e35a9e865a55d61c57ef9 \ + --hash=sha256:bb396952cfa7ad2f01061fbc7dc1ad91dd9d69243bcb8110cf4e36924785a0fe \ + --hash=sha256:c60b83c43faeb6d0a9831f0351d7787a0753f5087cc6fa218d78fdf38e5acef0 \ + --hash=sha256:c6ebecfb7a31385393203eb04ed8b6a08f5002f53df3d59e5e795edb80999652 \ + --hash=sha256:d78d8b86fcdfa1e4c21f8896614b6cc7ee01a2a758ec0c4382d662f2a62cf766 \ + --hash=sha256:d7f8df114d6b4cf5a916b98389aeaf1e3132035420a88beea4e3d977e5f267a5 \ + --hash=sha256:e1cb52fa2d67d7f7fab310b600f22ce1ff04d562d46e9e0ac3e3403c2bb4cc16 \ + --hash=sha256:e3fdf04e402f12e1de8074458549337febb3b45f21076cc02ef4ff786aff687e \ + --hash=sha256:e503cb45ed12b924b5b988ba9576dc9949b2f5283b8e33b21dcb6be74a7c58d0 \ + --hash=sha256:f19ac6ac0a256cf77d3cc926ef0b4e64a9725cc612f97228cd5dc4bd9dbab03b \ + --hash=sha256:f1fb0fd4a1e9b11ac21c30c169d169ef434c6e9344ee0ab27cfa6f605f6387b2 \ + --hash=sha256:fada6b07ec4f0befe05218181f4b85176f11d531911b64c715d1875c4736d73a \ + --hash=sha256:fd173b4cf02b20f60860dc2ffe30115c18972d7d6d2d69df97ac38dee03be5bf \ + --hash=sha256:fe752639919aad9ffb0dee0d87f29a6467d1ef764f13c4644d212a9a853a078d \ + --hash=sha256:fee387d2fab144e8a34e0e9c5ca0f45c9376b99de45628265cfa9886b1dbe62b + # via + # googleapis-common-protos + # grpc-google-iam-v1 +idna==3.4 \ + --hash=sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4 \ + --hash=sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2 + # via requests +inflection==0.5.1 \ + --hash=sha256:1a29730d366e996aaacffb2f1f1cb9593dc38e2ddd30c91250c6dde09ea9b417 \ + --hash=sha256:f38b2b640938a4f35ade69ac3d053042959b62a0f1076a5bbaa1b9526605a8a2 + # via -r requirements.in +iniconfig==2.0.0 \ + --hash=sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3 \ + --hash=sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374 + # via pytest +jinja2==3.1.2 \ + --hash=sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852 \ + --hash=sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61 + # via -r requirements.in +libcst==1.0.1 \ + --hash=sha256:0138068baf09561268c7f079373bda45f0e2b606d2d19df1307ca8a5134fc465 \ + --hash=sha256:119ba709f1dcb785a4458cf36cedb51d6f9cb2eec0acd7bb171f730eac7cb6ce \ + --hash=sha256:1adcfa7cafb6a0d39a1a0bec541355608038b45815e0c5019c95f91921d42884 \ + --hash=sha256:37187337f979ba426d8bfefc08008c3c1b09b9e9f9387050804ed2da88107570 \ + 
--hash=sha256:414350df5e334ddf0db1732d63da44e81b734d45abe1c597b5e5c0dd46aa4156 \ + --hash=sha256:440887e5f82efb299f2e98d4bfa5663851a878cfc0efed652ab8c50205191436 \ + --hash=sha256:47dba43855e9c7b06d8b256ee81f0ebec6a4f43605456519577e09dfe4b4288c \ + --hash=sha256:4840a3de701778f0a19582bb3085c61591329153f801dc25da84689a3733960b \ + --hash=sha256:4b4e336f6d68456017671cdda8ddebf9caebce8052cc21a3f494b03d7bd28386 \ + --hash=sha256:5599166d5fec40e18601fb8868519dde99f77b6e4ad6074958018f9545da7abd \ + --hash=sha256:5e3293e77657ba62533553bb9f0c5fb173780e164c65db1ea2a3e0d03944a284 \ + --hash=sha256:600c4d3a9a2f75d5a055fed713a5a4d812709947909610aa6527abe08a31896f \ + --hash=sha256:6caa33430c0c7a0fcad921b0deeec61ddb96796b6f88dca94966f6db62065f4f \ + --hash=sha256:80423311f09fc5fc3270ede44d30d9d8d3c2d3dd50dbf703a581ca7346949fa6 \ + --hash=sha256:8420926791b0b6206cb831a7ec73d26ae820e65bdf07ce9813c7754c7722c07a \ + --hash=sha256:8c50541c3fd6b1d5a3765c4bb5ee8ecbba9d0e798e48f79fd5adf3b6752de4d0 \ + --hash=sha256:8d31ce2790eab59c1bd8e33fe72d09cfc78635c145bdc3f08296b360abb5f443 \ + --hash=sha256:967c66fabd52102954207bf1541312b467afc210fdf7033f32da992fb6c2372c \ + --hash=sha256:9a4931feceab171e6fce73de94e13880424367247dad6ff2b49cabfec733e144 \ + --hash=sha256:9d6dec2a3c443792e6af7c36fadc256e4ea586214c76b52f0d18118811dbe351 \ + --hash=sha256:a6b5aea04c35e13109edad3cf83bc6dcd74309b150a781d2189eecb288b73a87 \ + --hash=sha256:ae49dcbfadefb82e830d41d9f0a1db0af3b771224768f431f1b7b3a9803ed7e3 \ + --hash=sha256:ae7f4e71d714f256b5f2ff98b5a9effba0f9dff4d779d8f35d7eb157bef78f59 \ + --hash=sha256:b0533de4e35396c61aeb3a6266ac30369a855910c2385aaa902ff4aabd60d409 \ + --hash=sha256:b666a605f4205c8357696f3b6571a38f6a8537cdcbb8f357587d35168298af34 \ + --hash=sha256:b97f652b15c50e91df411a9c8d5e6f75882b30743a49b387dcedd3f68ed94d75 \ + --hash=sha256:c90c74a8a314f0774f045122323fb60bacba79cbf5f71883c0848ecd67179541 \ + --hash=sha256:d237e9164a43caa7d6765ee560412264484e7620c546a2ee10a8d01bd56884e0 \ + --hash=sha256:ddd4e0eeec499d1c824ab545e62e957dbbd69a16bc4273208817638eb7d6b3c6 \ + --hash=sha256:f2cb687e1514625e91024e50a5d2e485c0ad3be24f199874ebf32b5de0346150 + # via -r requirements.in +markupsafe==2.1.3 \ + --hash=sha256:05fb21170423db021895e1ea1e1f3ab3adb85d1c2333cbc2310f2a26bc77272e \ + --hash=sha256:0a4e4a1aff6c7ac4cd55792abf96c915634c2b97e3cc1c7129578aa68ebd754e \ + --hash=sha256:10bbfe99883db80bdbaff2dcf681dfc6533a614f700da1287707e8a5d78a8431 \ + --hash=sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686 \ + --hash=sha256:1577735524cdad32f9f694208aa75e422adba74f1baee7551620e43a3141f559 \ + --hash=sha256:1b40069d487e7edb2676d3fbdb2b0829ffa2cd63a2ec26c4938b2d34391b4ecc \ + --hash=sha256:282c2cb35b5b673bbcadb33a585408104df04f14b2d9b01d4c345a3b92861c2c \ + --hash=sha256:2c1b19b3aaacc6e57b7e25710ff571c24d6c3613a45e905b1fde04d691b98ee0 \ + --hash=sha256:2ef12179d3a291be237280175b542c07a36e7f60718296278d8593d21ca937d4 \ + --hash=sha256:338ae27d6b8745585f87218a3f23f1512dbf52c26c28e322dbe54bcede54ccb9 \ + --hash=sha256:3c0fae6c3be832a0a0473ac912810b2877c8cb9d76ca48de1ed31e1c68386575 \ + --hash=sha256:3fd4abcb888d15a94f32b75d8fd18ee162ca0c064f35b11134be77050296d6ba \ + --hash=sha256:42de32b22b6b804f42c5d98be4f7e5e977ecdd9ee9b660fda1a3edf03b11792d \ + --hash=sha256:504b320cd4b7eff6f968eddf81127112db685e81f7e36e75f9f84f0df46041c3 \ + --hash=sha256:525808b8019e36eb524b8c68acdd63a37e75714eac50e988180b169d64480a00 \ + --hash=sha256:56d9f2ecac662ca1611d183feb03a3fa4406469dafe241673d521dd5ae92a155 \ + 
--hash=sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac \ + --hash=sha256:65c1a9bcdadc6c28eecee2c119465aebff8f7a584dd719facdd9e825ec61ab52 \ + --hash=sha256:68e78619a61ecf91e76aa3e6e8e33fc4894a2bebe93410754bd28fce0a8a4f9f \ + --hash=sha256:69c0f17e9f5a7afdf2cc9fb2d1ce6aabdb3bafb7f38017c0b77862bcec2bbad8 \ + --hash=sha256:6b2b56950d93e41f33b4223ead100ea0fe11f8e6ee5f641eb753ce4b77a7042b \ + --hash=sha256:787003c0ddb00500e49a10f2844fac87aa6ce977b90b0feaaf9de23c22508b24 \ + --hash=sha256:7ef3cb2ebbf91e330e3bb937efada0edd9003683db6b57bb108c4001f37a02ea \ + --hash=sha256:8023faf4e01efadfa183e863fefde0046de576c6f14659e8782065bcece22198 \ + --hash=sha256:8758846a7e80910096950b67071243da3e5a20ed2546e6392603c096778d48e0 \ + --hash=sha256:8afafd99945ead6e075b973fefa56379c5b5c53fd8937dad92c662da5d8fd5ee \ + --hash=sha256:8c41976a29d078bb235fea9b2ecd3da465df42a562910f9022f1a03107bd02be \ + --hash=sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2 \ + --hash=sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707 \ + --hash=sha256:962f82a3086483f5e5f64dbad880d31038b698494799b097bc59c2edf392fce6 \ + --hash=sha256:9dcdfd0eaf283af041973bff14a2e143b8bd64e069f4c383416ecd79a81aab58 \ + --hash=sha256:aa7bd130efab1c280bed0f45501b7c8795f9fdbeb02e965371bbef3523627779 \ + --hash=sha256:ab4a0df41e7c16a1392727727e7998a467472d0ad65f3ad5e6e765015df08636 \ + --hash=sha256:ad9e82fb8f09ade1c3e1b996a6337afac2b8b9e365f926f5a61aacc71adc5b3c \ + --hash=sha256:af598ed32d6ae86f1b747b82783958b1a4ab8f617b06fe68795c7f026abbdcad \ + --hash=sha256:b076b6226fb84157e3f7c971a47ff3a679d837cf338547532ab866c57930dbee \ + --hash=sha256:b7ff0f54cb4ff66dd38bebd335a38e2c22c41a8ee45aa608efc890ac3e3931bc \ + --hash=sha256:bfce63a9e7834b12b87c64d6b155fdd9b3b96191b6bd334bf37db7ff1fe457f2 \ + --hash=sha256:c011a4149cfbcf9f03994ec2edffcb8b1dc2d2aede7ca243746df97a5d41ce48 \ + --hash=sha256:c9c804664ebe8f83a211cace637506669e7890fec1b4195b505c214e50dd4eb7 \ + --hash=sha256:ca379055a47383d02a5400cb0d110cef0a776fc644cda797db0c5696cfd7e18e \ + --hash=sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b \ + --hash=sha256:cd0f502fe016460680cd20aaa5a76d241d6f35a1c3350c474bac1273803893fa \ + --hash=sha256:ceb01949af7121f9fc39f7d27f91be8546f3fb112c608bc4029aef0bab86a2a5 \ + --hash=sha256:d080e0a5eb2529460b30190fcfcc4199bd7f827663f858a226a81bc27beaa97e \ + --hash=sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb \ + --hash=sha256:df0be2b576a7abbf737b1575f048c23fb1d769f267ec4358296f31c2479db8f9 \ + --hash=sha256:e09031c87a1e51556fdcb46e5bd4f59dfb743061cf93c4d6831bf894f125eb57 \ + --hash=sha256:e4dd52d80b8c83fdce44e12478ad2e85c64ea965e75d66dbeafb0a3e77308fcc \ + --hash=sha256:fec21693218efe39aa7f8599346e90c705afa52c5b31ae019b2e57e8f6542bb2 + # via + # -r requirements.in + # jinja2 +mypy-extensions==1.0.0 \ + --hash=sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d \ + --hash=sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782 + # via typing-inspect +packaging==23.1 \ + --hash=sha256:994793af429502c4ea2ebf6bf664629d07c1a9fe974af92966e4b8d2df7edc61 \ + --hash=sha256:a392980d2b6cffa644431898be54b0045151319d1e7ec34f0cfed48767dd334f + # via pytest +pluggy==1.3.0 \ + --hash=sha256:cf61ae8f126ac6f7c451172cf30e3e43d3ca77615509771b3a984a0730651e12 \ + --hash=sha256:d89c696a773f8bd377d18e5ecda92b7a3793cbe66c87060a6fb58c7b6e1061f7 + # via pytest +proto-plus==1.22.3 \ + 
--hash=sha256:a49cd903bc0b6ab41f76bf65510439d56ca76f868adf0274e738bfdd096894df \ + --hash=sha256:fdcd09713cbd42480740d2fe29c990f7fbd885a67efc328aa8be6ee3e9f76a6b + # via -r requirements.in +protobuf==4.24.2 \ + --hash=sha256:237b9a50bd3b7307d0d834c1b0eb1a6cd47d3f4c2da840802cd03ea288ae8880 \ + --hash=sha256:25ae91d21e3ce8d874211110c2f7edd6384816fb44e06b2867afe35139e1fd1c \ + --hash=sha256:2b23bd6e06445699b12f525f3e92a916f2dcf45ffba441026357dea7fa46f42b \ + --hash=sha256:3b7b170d3491ceed33f723bbf2d5a260f8a4e23843799a3906f16ef736ef251e \ + --hash=sha256:4e69965e7e54de4db989289a9b971a099e626f6167a9351e9d112221fc691bc1 \ + --hash=sha256:58e12d2c1aa428ece2281cef09bbaa6938b083bcda606db3da4e02e991a0d924 \ + --hash=sha256:6bd26c1fa9038b26c5c044ee77e0ecb18463e957fefbaeb81a3feb419313a54e \ + --hash=sha256:77700b55ba41144fc64828e02afb41901b42497b8217b558e4a001f18a85f2e3 \ + --hash=sha256:7fda70797ddec31ddfa3576cbdcc3ddbb6b3078b737a1a87ab9136af0570cd6e \ + --hash=sha256:839952e759fc40b5d46be319a265cf94920174d88de31657d5622b5d8d6be5cd \ + --hash=sha256:bb7aa97c252279da65584af0456f802bd4b2de429eb945bbc9b3d61a42a8cd16 \ + --hash=sha256:c00c3c7eb9ad3833806e21e86dca448f46035242a680f81c3fe068ff65e79c74 \ + --hash=sha256:c5cdd486af081bf752225b26809d2d0a85e575b80a84cde5172a05bbb1990099 + # via + # -r requirements.in + # google-api-core + # googleapis-common-protos + # grpc-google-iam-v1 + # proto-plus +pyasn1==0.5.0 \ + --hash=sha256:87a2121042a1ac9358cabcaf1d07680ff97ee6404333bacca15f76aa8ad01a57 \ + --hash=sha256:97b7290ca68e62a832558ec3976f15cbf911bf5d7c7039d8b861c2a0ece69fde + # via + # pyasn1-modules + # rsa +pyasn1-modules==0.3.0 \ + --hash=sha256:5bd01446b736eb9d31512a30d46c1ac3395d676c6f3cafa4c03eb54b9925631c \ + --hash=sha256:d3ccd6ed470d9ffbc716be08bd90efbd44d0734bc9303818f7336070984a162d + # via google-auth +pypandoc==1.11 \ + --hash=sha256:7f6d68db0e57e0f6961bec2190897118c4d305fc2d31c22cd16037f22ee084a5 \ + --hash=sha256:b260596934e9cfc6513056110a7c8600171d414f90558bf4407e68b209be8007 + # via -r requirements.in +pytest==7.4.1 \ + --hash=sha256:2f2301e797521b23e4d2585a0a3d7b5e50fdddaaf7e7d6773ea26ddb17c213ab \ + --hash=sha256:460c9a59b14e27c602eb5ece2e47bec99dc5fc5f6513cf924a7d03a578991b1f + # via pytest-asyncio +pytest-asyncio==0.21.1 \ + --hash=sha256:40a7eae6dded22c7b604986855ea48400ab15b069ae38116e8c01238e9eeb64d \ + --hash=sha256:8666c1c8ac02631d7c51ba282e0c69a8a452b211ffedf2599099845da5c5c37b + # via -r requirements.in +pyyaml==6.0.1 \ + --hash=sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc \ + --hash=sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741 \ + --hash=sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206 \ + --hash=sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27 \ + --hash=sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595 \ + --hash=sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62 \ + --hash=sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98 \ + --hash=sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696 \ + --hash=sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d \ + --hash=sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867 \ + --hash=sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47 \ + --hash=sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486 \ + --hash=sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6 \ 
+ --hash=sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3 \ + --hash=sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007 \ + --hash=sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938 \ + --hash=sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c \ + --hash=sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735 \ + --hash=sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d \ + --hash=sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba \ + --hash=sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8 \ + --hash=sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5 \ + --hash=sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd \ + --hash=sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3 \ + --hash=sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0 \ + --hash=sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515 \ + --hash=sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c \ + --hash=sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c \ + --hash=sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924 \ + --hash=sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34 \ + --hash=sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43 \ + --hash=sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859 \ + --hash=sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673 \ + --hash=sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a \ + --hash=sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab \ + --hash=sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa \ + --hash=sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c \ + --hash=sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585 \ + --hash=sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d \ + --hash=sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f + # via + # -r requirements.in + # libcst +requests==2.31.0 \ + --hash=sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f \ + --hash=sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1 + # via google-api-core +rsa==4.9 \ + --hash=sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7 \ + --hash=sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21 + # via google-auth +six==1.16.0 \ + --hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926 \ + --hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254 + # via google-auth +tomli==2.0.1 \ + --hash=sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc \ + --hash=sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f + # via pytest +typing-extensions==4.7.1 \ + --hash=sha256:440d5dd3af93b060174bf433bccd69b0babc3b15b1a8dca43789fd7f61514b36 \ + --hash=sha256:b75ddc264f0ba5615db7ba217daeb99701ad295353c45f9e95963337ceeeffb2 + # via + # libcst + # typing-inspect +typing-inspect==0.9.0 \ + --hash=sha256:9ee6fc59062311ef8547596ab6b955e1b8aa46242d854bfc78f4f6b0eff35f9f \ + --hash=sha256:b23fc42ff6f6ef6954e4852c1fb512cdd18dbea03134f91f856a95ccc9461f78 + # via libcst +urllib3==2.0.4 \ + 
--hash=sha256:8d22f86aae8ef5e410d4f539fde9ce6b2113a001bb4d189e0aed70642d602b11 \
+    --hash=sha256:de7df1803967d2c2a98e4b11bb7d6bd9210474c46e8a0401514e3a42a75ebde4
+    # via
+    #   google-auth
+    #   requests
+
+# The following packages are considered to be unsafe in a requirements file:
+setuptools==68.1.2 \
+    --hash=sha256:3d4dfa6d95f1b101d695a6160a7626e15583af71a5f52176efa5d39a054d475d \
+    --hash=sha256:3d8083eed2d13afc9426f227b24fd1659489ec107c0e86cec2ffdde5c92e790b
+    # via -r requirements.in
diff --git a/rules_python_gapic/BUILD.bazel b/rules_python_gapic/BUILD.bazel
index 41f2ac97c6..9c6e7a3d61 100644
--- a/rules_python_gapic/BUILD.bazel
+++ b/rules_python_gapic/BUILD.bazel
@@ -1 +1,4 @@
-exports_files(["test.py", "pytest.py"])
+exports_files([
+    "test.py",
+    "pytest.py",
+])
diff --git a/rules_python_gapic/pytest.py b/rules_python_gapic/pytest.py
index 934fdfa680..7209585163 100644
--- a/rules_python_gapic/pytest.py
+++ b/rules_python_gapic/pytest.py
@@ -3,9 +3,14 @@
 import os
 
-if __name__ == '__main__':
-    sys.exit(pytest.main([
-        '--disable-pytest-warnings',
-        '--quiet',
-        os.path.dirname(os.path.abspath(__file__))
-    ]))
+if __name__ == "__main__":
+    # The generated file name will be of the form `<module>_pytest.py`.
+    # The generated gapic will be in a directory `<module>_srcjar.py`.
+    # Extract the `<module>` from this file, and use it to determine the
+    # directory of the generated gapic.
+    # Only run `pytest` on the `tests` directory.
+    module_name = os.path.abspath(__file__).replace("_pytest.py", "")
+    src_directory = f"{module_name}_srcjar.py"
+    sys.exit(
+        pytest.main(["--disable-pytest-warnings", "--quiet", f"{src_directory}/tests"])
+    )
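The rewritten runner above derives every path from its own file name. A minimal sketch of that derivation outside Bazel (the path `/build/bin/asset_py_gapic_pytest.py` is hypothetical and used only for illustration):

.. code-block:: python

    import os

    # Bazel materializes the runner as `<module>_pytest.py`; here the
    # hypothetical `<module>` prefix is `/build/bin/asset_py_gapic`.
    runner = "/build/bin/asset_py_gapic_pytest.py"

    # Stripping the `_pytest.py` suffix recovers the module prefix...
    module_name = os.path.abspath(runner).replace("_pytest.py", "")

    # ...which names the generated GAPIC directory, whose `tests` subtree is
    # the only thing handed to pytest.
    src_directory = f"{module_name}_srcjar.py"
    assert f"{src_directory}/tests" == "/build/bin/asset_py_gapic_srcjar.py/tests"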
diff --git a/setup.py b/setup.py
index 3840434c39..3d5d85e961 100644
--- a/setup.py
+++ b/setup.py
@@ -22,7 +22,7 @@
 name = "gapic-generator"
 description = "Google API Client Generator for Python"
 url = "https://github.com/googleapis/gapic-generator-python"
-version = "1.11.4"
+version = "1.11.5"
 release_status = "Development Status :: 5 - Production/Stable"
 dependencies = [
     "click >= 6.7",
diff --git a/tests/integration/BUILD.bazel b/tests/integration/BUILD.bazel
index 347510d833..4d700c938f 100644
--- a/tests/integration/BUILD.bazel
+++ b/tests/integration/BUILD.bazel
@@ -2,10 +2,6 @@ load(
     "@gapic_generator_python//rules_python_gapic:py_gapic.bzl",
     "py_gapic_library",
 )
-load(
-    "@gapic_generator_python//rules_python_gapic:py_gapic_pkg.bzl",
-    "py_gapic_assembly_pkg",
-)
 load(
     "@gapic_generator_python//rules_python_gapic/test:integration_test.bzl",
     "golden_update",
@@ -16,8 +12,6 @@ load(
     "py_proto_library",
 )
 
-load("@rules_proto//proto:defs.bzl", "proto_library")
-
 package(default_visibility = ["//visibility:public"])
 
####################################################
@@ -60,6 +54,7 @@ py_gapic_library(
     opt_args = [
         "autogen-snippets",
     ],
+    service_yaml = "cloudasset_v1.yaml",
     transport = "grpc+rest",
 )
 
@@ -71,6 +66,7 @@ py_gapic_library(
     opt_args = [
         "autogen-snippets",
     ],
+    service_yaml = "iamcredentials_v1.yaml",
     transport = "grpc+rest",
 )
 
@@ -103,7 +99,7 @@ py_gapic_library(
     ],
     service_yaml = "eventarc_v1.yaml",
     transport = "grpc+rest",
-    deps = [":iam_policy_py_proto"]
+    deps = [":iam_policy_py_proto"],
 )
 
 py_test(
@@ -128,7 +124,12 @@ py_gapic_library(
         "python-gapic-name=logging",
         "autogen-snippets",
     ],
-    transport = "grpc+rest",
+    # REST is not generated because of the following issues:
+    # - REST unit test `test_update_settings_rest_flattened` in logging_v2 fails. See #1728
+    # - REST is not generated in the public `BUILD.bazel`
+    #   https://github.com/googleapis/googleapis/blob/e85662e798a0a9495a035839f66d0c037c481e2c/google/logging/v2/BUILD.bazel#L201
+    service_yaml = "logging_v2.yaml",
+    transport = "grpc",
 )
 
 py_test(
@@ -150,6 +151,7 @@ py_gapic_library(
     opt_args = [
         "autogen-snippets",
     ],
+    service_yaml = "redis_v1.yaml",
     transport = "grpc+rest",
 )
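The `service_yaml` attributes added above point the generator at the service configs that follow; the `apis:` entries in those configs (for example `google.longrunning.Operations`) are what cause the mixin methods to be emitted on the generated clients. A minimal sketch of what that enables at runtime, assuming a regenerated `google-cloud-asset` client is installed and credentials are configured (the operation name is hypothetical):

.. code-block:: python

    from google.cloud import asset_v1
    from google.longrunning import operations_pb2

    client = asset_v1.AssetServiceClient()

    # `get_operation` comes from the Operations mixin declared in
    # cloudasset_v1.yaml and maps to the HTTP rule
    # `get: '/v1/{name=*/*/operations/*/**}'` shown below.
    operation = client.get_operation(
        operations_pb2.GetOperationRequest(
            name="projects/123/operations/export/456"  # hypothetical name
        )
    )
    print(operation.done)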
diff --git a/tests/integration/cloudasset_v1.yaml b/tests/integration/cloudasset_v1.yaml
new file mode 100644
index 0000000000..65bc90f6e2
--- /dev/null
+++ b/tests/integration/cloudasset_v1.yaml
@@ -0,0 +1,47 @@
+type: google.api.Service
+config_version: 3
+name: cloudasset.googleapis.com
+title: Cloud Asset API
+
+apis:
+- name: google.cloud.asset.v1.AssetService
+- name: google.longrunning.Operations
+
+types:
+- name: google.cloud.asset.v1.AnalyzeIamPolicyLongrunningMetadata
+- name: google.cloud.asset.v1.AnalyzeIamPolicyLongrunningResponse
+
+documentation:
+  summary: |-
+    The Cloud Asset API manages the history and inventory of Google Cloud
+    resources.
+  overview: |-
+    # Cloud Asset API
+    The Cloud Asset API keeps a history of Google Cloud asset metadata, and
+    allows Google Cloud users to download a dump of all asset metadata for the
+    resource types listed below within an organization or a project at a given
+    timestamp.
+    Read more documents here:
+    https://cloud.google.com/asset-inventory/docs
+backend:
+  rules:
+  - selector: 'google.cloud.asset.v1.AssetService.*'
+    deadline: 600.0
+  - selector: google.longrunning.Operations.GetOperation
+    deadline: 60.0
+
+http:
+  rules:
+  - selector: google.longrunning.Operations.GetOperation
+    get: '/v1/{name=*/*/operations/*/**}'
+
+authentication:
+  rules:
+  - selector: 'google.cloud.asset.v1.AssetService.*'
+    oauth:
+      canonical_scopes: |-
+        https://www.googleapis.com/auth/cloud-platform
+  - selector: google.longrunning.Operations.GetOperation
+    oauth:
+      canonical_scopes: |-
+        https://www.googleapis.com/auth/cloud-platform
\ No newline at end of file
diff --git a/tests/integration/eventarc_v1.yaml b/tests/integration/eventarc_v1.yaml
index afd8162044..26d8875e5b 100644
--- a/tests/integration/eventarc_v1.yaml
+++ b/tests/integration/eventarc_v1.yaml
@@ -7,3 +7,112 @@ apis:
 - name: google.cloud.eventarc.v1.Eventarc
 - name: google.cloud.location.Locations
 - name: google.iam.v1.IAMPolicy
+- name: google.longrunning.Operations
+
+types:
+- name: google.cloud.eventarc.v1.OperationMetadata
+
+documentation:
+  summary: 'Build event-driven applications on Google Cloud Platform.'
+  overview: |-
+    Eventarc lets you asynchronously deliver events from Google services, SaaS,
+    and your own apps using loosely coupled services that react to state
+    changes. Eventarc requires no infrastructure management — you can optimize
+    productivity and costs while building an event-driven solution.
+  rules:
+  - selector: google.cloud.location.Locations.GetLocation
+    description: Gets information about a location.
+
+  - selector: google.cloud.location.Locations.ListLocations
+    description: Lists information about the supported locations for this service.
+
+  - selector: google.iam.v1.IAMPolicy.GetIamPolicy
+    description: |-
+      Gets the access control policy for a resource. Returns an empty policy
+      if the resource exists and does not have a policy set.
+  - selector: google.iam.v1.IAMPolicy.SetIamPolicy
+    description: |-
+      Sets the access control policy on the specified resource. Replaces
+      any existing policy.
+      Can return `NOT_FOUND`, `INVALID_ARGUMENT`, and `PERMISSION_DENIED`
+      errors.
+  - selector: google.iam.v1.IAMPolicy.TestIamPermissions
+    description: |-
+      Returns permissions that a caller has on the specified resource. If the
+      resource does not exist, this will return an empty set of
+      permissions, not a `NOT_FOUND` error.
+      Note: This operation is designed to be used for building
+      permission-aware UIs and command-line tools, not for authorization
+      checking. This operation may "fail open" without warning.
+backend:
+  rules:
+  - selector: 'google.cloud.eventarc.v1.Eventarc.*'
+    deadline: 60.0
+  - selector: google.cloud.location.Locations.GetLocation
+    deadline: 60.0
+  - selector: google.cloud.location.Locations.ListLocations
+    deadline: 60.0
+  - selector: 'google.iam.v1.IAMPolicy.*'
+    deadline: 60.0
+  - selector: 'google.longrunning.Operations.*'
+    deadline: 60.0
+
+http:
+  rules:
+  - selector: google.cloud.location.Locations.GetLocation
+    get: '/v1/{name=projects/*/locations/*}'
+  - selector: google.cloud.location.Locations.ListLocations
+    get: '/v1/{name=projects/*}/locations'
+  - selector: google.iam.v1.IAMPolicy.GetIamPolicy
+    get: '/v1/{resource=projects/*/locations/*/triggers/*}:getIamPolicy'
+    additional_bindings:
+    - get: '/v1/{resource=projects/*/locations/*/channels/*}:getIamPolicy'
+    - get: '/v1/{resource=projects/*/locations/*/channelConnections/*}:getIamPolicy'
+  - selector: google.iam.v1.IAMPolicy.SetIamPolicy
+    post: '/v1/{resource=projects/*/locations/*/triggers/*}:setIamPolicy'
+    body: '*'
+    additional_bindings:
+    - post: '/v1/{resource=projects/*/locations/*/channels/*}:setIamPolicy'
+      body: '*'
+    - post: '/v1/{resource=projects/*/locations/*/channelConnections/*}:setIamPolicy'
+      body: '*'
+  - selector: google.iam.v1.IAMPolicy.TestIamPermissions
+    post: '/v1/{resource=projects/*/locations/*/triggers/*}:testIamPermissions'
+    body: '*'
+    additional_bindings:
+    - post: '/v1/{resource=projects/*/locations/*/channels/*}:testIamPermissions'
+      body: '*'
+    - post: '/v1/{resource=projects/*/locations/*/channelConnections/*}:testIamPermissions'
+      body: '*'
+  - selector: google.longrunning.Operations.CancelOperation
+    post: '/v1/{name=projects/*/locations/*/operations/*}:cancel'
+    body: '*'
+  - selector: google.longrunning.Operations.DeleteOperation
+    delete: '/v1/{name=projects/*/locations/*/operations/*}'
+  - selector: google.longrunning.Operations.GetOperation
+    get: '/v1/{name=projects/*/locations/*/operations/*}'
+  - selector: google.longrunning.Operations.ListOperations
+    get: '/v1/{name=projects/*/locations/*}/operations'
+
+authentication:
+  rules:
+  - selector: 'google.cloud.eventarc.v1.Eventarc.*'
+    oauth:
+      canonical_scopes: |-
+        https://www.googleapis.com/auth/cloud-platform
+  - selector: google.cloud.location.Locations.GetLocation
+    oauth:
+      canonical_scopes: |-
+        https://www.googleapis.com/auth/cloud-platform
+  - selector: google.cloud.location.Locations.ListLocations
+    oauth:
+      canonical_scopes: |-
+        https://www.googleapis.com/auth/cloud-platform
+  - selector: 'google.iam.v1.IAMPolicy.*'
+    oauth:
+      canonical_scopes: |-
+        https://www.googleapis.com/auth/cloud-platform
+  - selector: 'google.longrunning.Operations.*'
+    oauth:
+      canonical_scopes: |-
+        https://www.googleapis.com/auth/cloud-platform
\ No newline at end of file
diff --git a/tests/integration/goldens/asset/docs/_static/custom.css b/tests/integration/goldens/asset/docs/_static/custom.css
new file mode 100755
index 0000000000..06423be0b5
--- /dev/null
+++ 
b/tests/integration/goldens/asset/docs/_static/custom.css @@ -0,0 +1,3 @@ +dl.field-list > dt { + min-width: 100px +} diff --git a/tests/integration/goldens/asset/docs/conf.py b/tests/integration/goldens/asset/docs/conf.py index 9e78282ded..b6b4f0941d 100755 --- a/tests/integration/goldens/asset/docs/conf.py +++ b/tests/integration/goldens/asset/docs/conf.py @@ -96,7 +96,7 @@ # # This is also used if you do content translation via gettext catalogs. # Usually you set "language" from the command line for these cases. -language = None +language = 'en' # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: diff --git a/tests/integration/goldens/asset/google/cloud/asset/__init__.py b/tests/integration/goldens/asset/google/cloud/asset/__init__.py index f8aff0bc41..366ca0861d 100755 --- a/tests/integration/goldens/asset/google/cloud/asset/__init__.py +++ b/tests/integration/goldens/asset/google/cloud/asset/__init__.py @@ -21,15 +21,30 @@ from google.cloud.asset_v1.services.asset_service.client import AssetServiceClient from google.cloud.asset_v1.services.asset_service.async_client import AssetServiceAsyncClient +from google.cloud.asset_v1.types.asset_service import AnalyzeIamPolicyLongrunningMetadata from google.cloud.asset_v1.types.asset_service import AnalyzeIamPolicyLongrunningRequest from google.cloud.asset_v1.types.asset_service import AnalyzeIamPolicyLongrunningResponse from google.cloud.asset_v1.types.asset_service import AnalyzeIamPolicyRequest from google.cloud.asset_v1.types.asset_service import AnalyzeIamPolicyResponse +from google.cloud.asset_v1.types.asset_service import AnalyzeMoveRequest +from google.cloud.asset_v1.types.asset_service import AnalyzeMoveResponse +from google.cloud.asset_v1.types.asset_service import AnalyzeOrgPoliciesRequest +from google.cloud.asset_v1.types.asset_service import AnalyzeOrgPoliciesResponse +from google.cloud.asset_v1.types.asset_service import AnalyzeOrgPolicyGovernedAssetsRequest +from google.cloud.asset_v1.types.asset_service import AnalyzeOrgPolicyGovernedAssetsResponse +from google.cloud.asset_v1.types.asset_service import AnalyzeOrgPolicyGovernedContainersRequest +from google.cloud.asset_v1.types.asset_service import AnalyzeOrgPolicyGovernedContainersResponse +from google.cloud.asset_v1.types.asset_service import AnalyzerOrgPolicy +from google.cloud.asset_v1.types.asset_service import AnalyzerOrgPolicyConstraint from google.cloud.asset_v1.types.asset_service import BatchGetAssetsHistoryRequest from google.cloud.asset_v1.types.asset_service import BatchGetAssetsHistoryResponse +from google.cloud.asset_v1.types.asset_service import BatchGetEffectiveIamPoliciesRequest +from google.cloud.asset_v1.types.asset_service import BatchGetEffectiveIamPoliciesResponse from google.cloud.asset_v1.types.asset_service import BigQueryDestination from google.cloud.asset_v1.types.asset_service import CreateFeedRequest +from google.cloud.asset_v1.types.asset_service import CreateSavedQueryRequest from google.cloud.asset_v1.types.asset_service import DeleteFeedRequest +from google.cloud.asset_v1.types.asset_service import DeleteSavedQueryRequest from google.cloud.asset_v1.types.asset_service import ExportAssetsRequest from google.cloud.asset_v1.types.asset_service import ExportAssetsResponse from google.cloud.asset_v1.types.asset_service import Feed @@ -37,43 +52,79 @@ from google.cloud.asset_v1.types.asset_service import GcsDestination from google.cloud.asset_v1.types.asset_service import GcsOutputResult from 
google.cloud.asset_v1.types.asset_service import GetFeedRequest +from google.cloud.asset_v1.types.asset_service import GetSavedQueryRequest from google.cloud.asset_v1.types.asset_service import IamPolicyAnalysisOutputConfig from google.cloud.asset_v1.types.asset_service import IamPolicyAnalysisQuery from google.cloud.asset_v1.types.asset_service import ListAssetsRequest from google.cloud.asset_v1.types.asset_service import ListAssetsResponse from google.cloud.asset_v1.types.asset_service import ListFeedsRequest from google.cloud.asset_v1.types.asset_service import ListFeedsResponse +from google.cloud.asset_v1.types.asset_service import ListSavedQueriesRequest +from google.cloud.asset_v1.types.asset_service import ListSavedQueriesResponse +from google.cloud.asset_v1.types.asset_service import MoveAnalysis +from google.cloud.asset_v1.types.asset_service import MoveAnalysisResult +from google.cloud.asset_v1.types.asset_service import MoveImpact from google.cloud.asset_v1.types.asset_service import OutputConfig from google.cloud.asset_v1.types.asset_service import OutputResult from google.cloud.asset_v1.types.asset_service import PartitionSpec from google.cloud.asset_v1.types.asset_service import PubsubDestination +from google.cloud.asset_v1.types.asset_service import QueryAssetsOutputConfig +from google.cloud.asset_v1.types.asset_service import QueryAssetsRequest +from google.cloud.asset_v1.types.asset_service import QueryAssetsResponse +from google.cloud.asset_v1.types.asset_service import QueryResult +from google.cloud.asset_v1.types.asset_service import SavedQuery from google.cloud.asset_v1.types.asset_service import SearchAllIamPoliciesRequest from google.cloud.asset_v1.types.asset_service import SearchAllIamPoliciesResponse from google.cloud.asset_v1.types.asset_service import SearchAllResourcesRequest from google.cloud.asset_v1.types.asset_service import SearchAllResourcesResponse +from google.cloud.asset_v1.types.asset_service import TableFieldSchema +from google.cloud.asset_v1.types.asset_service import TableSchema from google.cloud.asset_v1.types.asset_service import UpdateFeedRequest +from google.cloud.asset_v1.types.asset_service import UpdateSavedQueryRequest from google.cloud.asset_v1.types.asset_service import ContentType from google.cloud.asset_v1.types.assets import Asset +from google.cloud.asset_v1.types.assets import AttachedResource from google.cloud.asset_v1.types.assets import ConditionEvaluation from google.cloud.asset_v1.types.assets import IamPolicyAnalysisResult from google.cloud.asset_v1.types.assets import IamPolicyAnalysisState from google.cloud.asset_v1.types.assets import IamPolicySearchResult +from google.cloud.asset_v1.types.assets import RelatedAsset +from google.cloud.asset_v1.types.assets import RelatedAssets +from google.cloud.asset_v1.types.assets import RelatedResource +from google.cloud.asset_v1.types.assets import RelatedResources +from google.cloud.asset_v1.types.assets import RelationshipAttributes from google.cloud.asset_v1.types.assets import Resource from google.cloud.asset_v1.types.assets import ResourceSearchResult from google.cloud.asset_v1.types.assets import TemporalAsset from google.cloud.asset_v1.types.assets import TimeWindow +from google.cloud.asset_v1.types.assets import VersionedResource __all__ = ('AssetServiceClient', 'AssetServiceAsyncClient', + 'AnalyzeIamPolicyLongrunningMetadata', 'AnalyzeIamPolicyLongrunningRequest', 'AnalyzeIamPolicyLongrunningResponse', 'AnalyzeIamPolicyRequest', 'AnalyzeIamPolicyResponse', + 
'AnalyzeMoveRequest', + 'AnalyzeMoveResponse', + 'AnalyzeOrgPoliciesRequest', + 'AnalyzeOrgPoliciesResponse', + 'AnalyzeOrgPolicyGovernedAssetsRequest', + 'AnalyzeOrgPolicyGovernedAssetsResponse', + 'AnalyzeOrgPolicyGovernedContainersRequest', + 'AnalyzeOrgPolicyGovernedContainersResponse', + 'AnalyzerOrgPolicy', + 'AnalyzerOrgPolicyConstraint', 'BatchGetAssetsHistoryRequest', 'BatchGetAssetsHistoryResponse', + 'BatchGetEffectiveIamPoliciesRequest', + 'BatchGetEffectiveIamPoliciesResponse', 'BigQueryDestination', 'CreateFeedRequest', + 'CreateSavedQueryRequest', 'DeleteFeedRequest', + 'DeleteSavedQueryRequest', 'ExportAssetsRequest', 'ExportAssetsResponse', 'Feed', @@ -81,29 +132,50 @@ 'GcsDestination', 'GcsOutputResult', 'GetFeedRequest', + 'GetSavedQueryRequest', 'IamPolicyAnalysisOutputConfig', 'IamPolicyAnalysisQuery', 'ListAssetsRequest', 'ListAssetsResponse', 'ListFeedsRequest', 'ListFeedsResponse', + 'ListSavedQueriesRequest', + 'ListSavedQueriesResponse', + 'MoveAnalysis', + 'MoveAnalysisResult', + 'MoveImpact', 'OutputConfig', 'OutputResult', 'PartitionSpec', 'PubsubDestination', + 'QueryAssetsOutputConfig', + 'QueryAssetsRequest', + 'QueryAssetsResponse', + 'QueryResult', + 'SavedQuery', 'SearchAllIamPoliciesRequest', 'SearchAllIamPoliciesResponse', 'SearchAllResourcesRequest', 'SearchAllResourcesResponse', + 'TableFieldSchema', + 'TableSchema', 'UpdateFeedRequest', + 'UpdateSavedQueryRequest', 'ContentType', 'Asset', + 'AttachedResource', 'ConditionEvaluation', 'IamPolicyAnalysisResult', 'IamPolicyAnalysisState', 'IamPolicySearchResult', + 'RelatedAsset', + 'RelatedAssets', + 'RelatedResource', + 'RelatedResources', + 'RelationshipAttributes', 'Resource', 'ResourceSearchResult', 'TemporalAsset', 'TimeWindow', + 'VersionedResource', ) diff --git a/tests/integration/goldens/asset/google/cloud/asset_v1/__init__.py b/tests/integration/goldens/asset/google/cloud/asset_v1/__init__.py index 78280c4b82..8fbe3ed54f 100755 --- a/tests/integration/goldens/asset/google/cloud/asset_v1/__init__.py +++ b/tests/integration/goldens/asset/google/cloud/asset_v1/__init__.py @@ -21,15 +21,30 @@ from .services.asset_service import AssetServiceClient from .services.asset_service import AssetServiceAsyncClient +from .types.asset_service import AnalyzeIamPolicyLongrunningMetadata from .types.asset_service import AnalyzeIamPolicyLongrunningRequest from .types.asset_service import AnalyzeIamPolicyLongrunningResponse from .types.asset_service import AnalyzeIamPolicyRequest from .types.asset_service import AnalyzeIamPolicyResponse +from .types.asset_service import AnalyzeMoveRequest +from .types.asset_service import AnalyzeMoveResponse +from .types.asset_service import AnalyzeOrgPoliciesRequest +from .types.asset_service import AnalyzeOrgPoliciesResponse +from .types.asset_service import AnalyzeOrgPolicyGovernedAssetsRequest +from .types.asset_service import AnalyzeOrgPolicyGovernedAssetsResponse +from .types.asset_service import AnalyzeOrgPolicyGovernedContainersRequest +from .types.asset_service import AnalyzeOrgPolicyGovernedContainersResponse +from .types.asset_service import AnalyzerOrgPolicy +from .types.asset_service import AnalyzerOrgPolicyConstraint from .types.asset_service import BatchGetAssetsHistoryRequest from .types.asset_service import BatchGetAssetsHistoryResponse +from .types.asset_service import BatchGetEffectiveIamPoliciesRequest +from .types.asset_service import BatchGetEffectiveIamPoliciesResponse from .types.asset_service import BigQueryDestination from .types.asset_service import 
CreateFeedRequest +from .types.asset_service import CreateSavedQueryRequest from .types.asset_service import DeleteFeedRequest +from .types.asset_service import DeleteSavedQueryRequest from .types.asset_service import ExportAssetsRequest from .types.asset_service import ExportAssetsResponse from .types.asset_service import Feed @@ -37,47 +52,84 @@ from .types.asset_service import GcsDestination from .types.asset_service import GcsOutputResult from .types.asset_service import GetFeedRequest +from .types.asset_service import GetSavedQueryRequest from .types.asset_service import IamPolicyAnalysisOutputConfig from .types.asset_service import IamPolicyAnalysisQuery from .types.asset_service import ListAssetsRequest from .types.asset_service import ListAssetsResponse from .types.asset_service import ListFeedsRequest from .types.asset_service import ListFeedsResponse +from .types.asset_service import ListSavedQueriesRequest +from .types.asset_service import ListSavedQueriesResponse +from .types.asset_service import MoveAnalysis +from .types.asset_service import MoveAnalysisResult +from .types.asset_service import MoveImpact from .types.asset_service import OutputConfig from .types.asset_service import OutputResult from .types.asset_service import PartitionSpec from .types.asset_service import PubsubDestination +from .types.asset_service import QueryAssetsOutputConfig +from .types.asset_service import QueryAssetsRequest +from .types.asset_service import QueryAssetsResponse +from .types.asset_service import QueryResult +from .types.asset_service import SavedQuery from .types.asset_service import SearchAllIamPoliciesRequest from .types.asset_service import SearchAllIamPoliciesResponse from .types.asset_service import SearchAllResourcesRequest from .types.asset_service import SearchAllResourcesResponse +from .types.asset_service import TableFieldSchema +from .types.asset_service import TableSchema from .types.asset_service import UpdateFeedRequest +from .types.asset_service import UpdateSavedQueryRequest from .types.asset_service import ContentType from .types.assets import Asset +from .types.assets import AttachedResource from .types.assets import ConditionEvaluation from .types.assets import IamPolicyAnalysisResult from .types.assets import IamPolicyAnalysisState from .types.assets import IamPolicySearchResult +from .types.assets import RelatedAsset +from .types.assets import RelatedAssets +from .types.assets import RelatedResource +from .types.assets import RelatedResources +from .types.assets import RelationshipAttributes from .types.assets import Resource from .types.assets import ResourceSearchResult from .types.assets import TemporalAsset from .types.assets import TimeWindow +from .types.assets import VersionedResource __all__ = ( 'AssetServiceAsyncClient', +'AnalyzeIamPolicyLongrunningMetadata', 'AnalyzeIamPolicyLongrunningRequest', 'AnalyzeIamPolicyLongrunningResponse', 'AnalyzeIamPolicyRequest', 'AnalyzeIamPolicyResponse', +'AnalyzeMoveRequest', +'AnalyzeMoveResponse', +'AnalyzeOrgPoliciesRequest', +'AnalyzeOrgPoliciesResponse', +'AnalyzeOrgPolicyGovernedAssetsRequest', +'AnalyzeOrgPolicyGovernedAssetsResponse', +'AnalyzeOrgPolicyGovernedContainersRequest', +'AnalyzeOrgPolicyGovernedContainersResponse', +'AnalyzerOrgPolicy', +'AnalyzerOrgPolicyConstraint', 'Asset', 'AssetServiceClient', +'AttachedResource', 'BatchGetAssetsHistoryRequest', 'BatchGetAssetsHistoryResponse', +'BatchGetEffectiveIamPoliciesRequest', +'BatchGetEffectiveIamPoliciesResponse', 'BigQueryDestination', 
'ConditionEvaluation', 'ContentType', 'CreateFeedRequest', +'CreateSavedQueryRequest', 'DeleteFeedRequest', +'DeleteSavedQueryRequest', 'ExportAssetsRequest', 'ExportAssetsResponse', 'Feed', @@ -85,6 +137,7 @@ 'GcsDestination', 'GcsOutputResult', 'GetFeedRequest', +'GetSavedQueryRequest', 'IamPolicyAnalysisOutputConfig', 'IamPolicyAnalysisQuery', 'IamPolicyAnalysisResult', @@ -94,17 +147,36 @@ 'ListAssetsResponse', 'ListFeedsRequest', 'ListFeedsResponse', +'ListSavedQueriesRequest', +'ListSavedQueriesResponse', +'MoveAnalysis', +'MoveAnalysisResult', +'MoveImpact', 'OutputConfig', 'OutputResult', 'PartitionSpec', 'PubsubDestination', +'QueryAssetsOutputConfig', +'QueryAssetsRequest', +'QueryAssetsResponse', +'QueryResult', +'RelatedAsset', +'RelatedAssets', +'RelatedResource', +'RelatedResources', +'RelationshipAttributes', 'Resource', 'ResourceSearchResult', +'SavedQuery', 'SearchAllIamPoliciesRequest', 'SearchAllIamPoliciesResponse', 'SearchAllResourcesRequest', 'SearchAllResourcesResponse', +'TableFieldSchema', +'TableSchema', 'TemporalAsset', 'TimeWindow', 'UpdateFeedRequest', +'UpdateSavedQueryRequest', +'VersionedResource', ) diff --git a/tests/integration/goldens/asset/google/cloud/asset_v1/gapic_metadata.json b/tests/integration/goldens/asset/google/cloud/asset_v1/gapic_metadata.json index c87ac115e9..e39e504313 100755 --- a/tests/integration/goldens/asset/google/cloud/asset_v1/gapic_metadata.json +++ b/tests/integration/goldens/asset/google/cloud/asset_v1/gapic_metadata.json @@ -20,21 +20,56 @@ "analyze_iam_policy_longrunning" ] }, + "AnalyzeMove": { + "methods": [ + "analyze_move" + ] + }, + "AnalyzeOrgPolicies": { + "methods": [ + "analyze_org_policies" + ] + }, + "AnalyzeOrgPolicyGovernedAssets": { + "methods": [ + "analyze_org_policy_governed_assets" + ] + }, + "AnalyzeOrgPolicyGovernedContainers": { + "methods": [ + "analyze_org_policy_governed_containers" + ] + }, "BatchGetAssetsHistory": { "methods": [ "batch_get_assets_history" ] }, + "BatchGetEffectiveIamPolicies": { + "methods": [ + "batch_get_effective_iam_policies" + ] + }, "CreateFeed": { "methods": [ "create_feed" ] }, + "CreateSavedQuery": { + "methods": [ + "create_saved_query" + ] + }, "DeleteFeed": { "methods": [ "delete_feed" ] }, + "DeleteSavedQuery": { + "methods": [ + "delete_saved_query" + ] + }, "ExportAssets": { "methods": [ "export_assets" @@ -45,6 +80,11 @@ "get_feed" ] }, + "GetSavedQuery": { + "methods": [ + "get_saved_query" + ] + }, "ListAssets": { "methods": [ "list_assets" @@ -55,6 +95,16 @@ "list_feeds" ] }, + "ListSavedQueries": { + "methods": [ + "list_saved_queries" + ] + }, + "QueryAssets": { + "methods": [ + "query_assets" + ] + }, "SearchAllIamPolicies": { "methods": [ "search_all_iam_policies" @@ -69,6 +119,11 @@ "methods": [ "update_feed" ] + }, + "UpdateSavedQuery": { + "methods": [ + "update_saved_query" + ] } } }, @@ -85,21 +140,56 @@ "analyze_iam_policy_longrunning" ] }, + "AnalyzeMove": { + "methods": [ + "analyze_move" + ] + }, + "AnalyzeOrgPolicies": { + "methods": [ + "analyze_org_policies" + ] + }, + "AnalyzeOrgPolicyGovernedAssets": { + "methods": [ + "analyze_org_policy_governed_assets" + ] + }, + "AnalyzeOrgPolicyGovernedContainers": { + "methods": [ + "analyze_org_policy_governed_containers" + ] + }, "BatchGetAssetsHistory": { "methods": [ "batch_get_assets_history" ] }, + "BatchGetEffectiveIamPolicies": { + "methods": [ + "batch_get_effective_iam_policies" + ] + }, "CreateFeed": { "methods": [ "create_feed" ] }, + "CreateSavedQuery": { + "methods": [ + "create_saved_query" + 
] + }, "DeleteFeed": { "methods": [ "delete_feed" ] }, + "DeleteSavedQuery": { + "methods": [ + "delete_saved_query" + ] + }, "ExportAssets": { "methods": [ "export_assets" @@ -110,6 +200,11 @@ "get_feed" ] }, + "GetSavedQuery": { + "methods": [ + "get_saved_query" + ] + }, "ListAssets": { "methods": [ "list_assets" @@ -120,6 +215,16 @@ "list_feeds" ] }, + "ListSavedQueries": { + "methods": [ + "list_saved_queries" + ] + }, + "QueryAssets": { + "methods": [ + "query_assets" + ] + }, "SearchAllIamPolicies": { "methods": [ "search_all_iam_policies" @@ -134,6 +239,11 @@ "methods": [ "update_feed" ] + }, + "UpdateSavedQuery": { + "methods": [ + "update_saved_query" + ] } } }, @@ -150,21 +260,56 @@ "analyze_iam_policy_longrunning" ] }, + "AnalyzeMove": { + "methods": [ + "analyze_move" + ] + }, + "AnalyzeOrgPolicies": { + "methods": [ + "analyze_org_policies" + ] + }, + "AnalyzeOrgPolicyGovernedAssets": { + "methods": [ + "analyze_org_policy_governed_assets" + ] + }, + "AnalyzeOrgPolicyGovernedContainers": { + "methods": [ + "analyze_org_policy_governed_containers" + ] + }, "BatchGetAssetsHistory": { "methods": [ "batch_get_assets_history" ] }, + "BatchGetEffectiveIamPolicies": { + "methods": [ + "batch_get_effective_iam_policies" + ] + }, "CreateFeed": { "methods": [ "create_feed" ] }, + "CreateSavedQuery": { + "methods": [ + "create_saved_query" + ] + }, "DeleteFeed": { "methods": [ "delete_feed" ] }, + "DeleteSavedQuery": { + "methods": [ + "delete_saved_query" + ] + }, "ExportAssets": { "methods": [ "export_assets" @@ -175,6 +320,11 @@ "get_feed" ] }, + "GetSavedQuery": { + "methods": [ + "get_saved_query" + ] + }, "ListAssets": { "methods": [ "list_assets" @@ -185,6 +335,16 @@ "list_feeds" ] }, + "ListSavedQueries": { + "methods": [ + "list_saved_queries" + ] + }, + "QueryAssets": { + "methods": [ + "query_assets" + ] + }, "SearchAllIamPolicies": { "methods": [ "search_all_iam_policies" @@ -199,6 +359,11 @@ "methods": [ "update_feed" ] + }, + "UpdateSavedQuery": { + "methods": [ + "update_saved_query" + ] } } } diff --git a/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py b/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py index c5106ca73b..9a4309d1bb 100755 --- a/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py +++ b/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py @@ -37,6 +37,10 @@ from google.cloud.asset_v1.services.asset_service import pagers from google.cloud.asset_v1.types import asset_service from google.cloud.asset_v1.types import assets +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore from google.type import expr_pb2 # type: ignore from .transports.base import AssetServiceTransport, DEFAULT_CLIENT_INFO from .transports.grpc_asyncio import AssetServiceGrpcAsyncIOTransport @@ -51,10 +55,20 @@ class AssetServiceAsyncClient: DEFAULT_ENDPOINT = AssetServiceClient.DEFAULT_ENDPOINT DEFAULT_MTLS_ENDPOINT = AssetServiceClient.DEFAULT_MTLS_ENDPOINT + access_level_path = staticmethod(AssetServiceClient.access_level_path) + parse_access_level_path = staticmethod(AssetServiceClient.parse_access_level_path) + access_policy_path = staticmethod(AssetServiceClient.access_policy_path) + parse_access_policy_path = 
staticmethod(AssetServiceClient.parse_access_policy_path) asset_path = staticmethod(AssetServiceClient.asset_path) parse_asset_path = staticmethod(AssetServiceClient.parse_asset_path) feed_path = staticmethod(AssetServiceClient.feed_path) parse_feed_path = staticmethod(AssetServiceClient.parse_feed_path) + inventory_path = staticmethod(AssetServiceClient.inventory_path) + parse_inventory_path = staticmethod(AssetServiceClient.parse_inventory_path) + saved_query_path = staticmethod(AssetServiceClient.saved_query_path) + parse_saved_query_path = staticmethod(AssetServiceClient.parse_saved_query_path) + service_perimeter_path = staticmethod(AssetServiceClient.service_perimeter_path) + parse_service_perimeter_path = staticmethod(AssetServiceClient.parse_service_perimeter_path) common_billing_account_path = staticmethod(AssetServiceClient.common_billing_account_path) parse_common_billing_account_path = staticmethod(AssetServiceClient.parse_common_billing_account_path) common_folder_path = staticmethod(AssetServiceClient.common_folder_path) @@ -203,10 +217,10 @@ async def export_assets(self, line represents a [google.cloud.asset.v1.Asset][google.cloud.asset.v1.Asset] in the JSON format; for BigQuery table destinations, the output - table stores the fields in asset proto as columns. This API + table stores the fields in asset Protobuf as columns. This API implements the - [google.longrunning.Operation][google.longrunning.Operation] API - , which allows you to keep track of the export. We recommend + [google.longrunning.Operation][google.longrunning.Operation] + API, which allows you to keep track of the export. We recommend intervals of at least 2 seconds with exponential retry to poll the export operation result. For regular-size resource parent, the export operation usually finishes within 5 minutes. @@ -345,11 +359,13 @@ async def sample_list_assets(): request (Optional[Union[google.cloud.asset_v1.types.ListAssetsRequest, dict]]): The request object. ListAssets request. parent (:class:`str`): - Required. Name of the organization or project the assets - belong to. Format: "organizations/[organization-number]" - (such as "organizations/123"), "projects/[project-id]" - (such as "projects/my-project-id"), or - "projects/[project-number]" (such as "projects/12345"). + Required. Name of the organization, folder, or project + the assets belong to. Format: + "organizations/[organization-number]" (such as + "organizations/123"), "projects/[project-id]" (such as + "projects/my-project-id"), "projects/[project-number]" + (such as "projects/12345"), or "folders/[folder-number]" + (such as "folders/12345"). This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -363,6 +379,7 @@ async def sample_list_assets(): Returns: google.cloud.asset_v1.services.asset_service.pagers.ListAssetsAsyncPager: ListAssets response. + Iterating over this object will yield results and resolve additional pages automatically. @@ -563,8 +580,8 @@ async def sample_create_feed(): be an organization number (such as "organizations/123"), a folder number (such as "folders/123"), a project ID - (such as "projects/my-project-id")", or - a project number (such as + (such as "projects/my-project-id"), or a + project number (such as "projects/12345"). 
This corresponds to the ``parent`` field @@ -1074,10 +1091,10 @@ async def search_all_resources(self, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.SearchAllResourcesAsyncPager: - r"""Searches all Cloud resources within the specified scope, such as - a project, folder, or organization. The caller must be granted - the ``cloudasset.assets.searchAllResources`` permission on the - desired scope, otherwise the request will be rejected. + r"""Searches all Google Cloud resources within the specified scope, + such as a project, folder, or organization. The caller must be + granted the ``cloudasset.assets.searchAllResources`` permission + on the desired scope, otherwise the request will be rejected. .. code-block:: python @@ -1135,46 +1152,64 @@ async def sample_search_all_resources(): Examples: - - ``name:Important`` to find Cloud resources whose name - contains "Important" as a word. - - ``name=Important`` to find the Cloud resource whose - name is exactly "Important". - - ``displayName:Impor*`` to find Cloud resources whose - display name contains "Impor" as a prefix of any word - in the field. - - ``location:us-west*`` to find Cloud resources whose - location contains both "us" and "west" as prefixes. - - ``labels:prod`` to find Cloud resources whose labels - contain "prod" as a key or value. - - ``labels.env:prod`` to find Cloud resources that have - a label "env" and its value is "prod". - - ``labels.env:*`` to find Cloud resources that have a - label "env". - - ``kmsKey:key`` to find Cloud resources encrypted with - a customer-managed encryption key whose name contains - the word "key". - - ``state:ACTIVE`` to find Cloud resources whose state - contains "ACTIVE" as a word. - - ``NOT state:ACTIVE`` to find {{gcp_name}} resources + - ``name:Important`` to find Google Cloud resources + whose name contains "Important" as a word. + - ``name=Important`` to find the Google Cloud resource + whose name is exactly "Important". + - ``displayName:Impor*`` to find Google Cloud resources + whose display name contains "Impor" as a prefix of + any word in the field. + - ``location:us-west*`` to find Google Cloud resources + whose location contains both "us" and "west" as + prefixes. + - ``labels:prod`` to find Google Cloud resources whose + labels contain "prod" as a key or value. + - ``labels.env:prod`` to find Google Cloud resources + that have a label "env" and its value is "prod". + - ``labels.env:*`` to find Google Cloud resources that + have a label "env". + - ``kmsKey:key`` to find Google Cloud resources + encrypted with a customer-managed encryption key + whose name contains "key" as a word. This field is + deprecated. Please use the ``kmsKeys`` field to + retrieve Cloud KMS key information. + - ``kmsKeys:key`` to find Google Cloud resources + encrypted with customer-managed encryption keys whose + name contains the word "key". + - ``relationships:instance-group-1`` to find Google + Cloud resources that have relationships with + "instance-group-1" in the related resource name. + - ``relationships:INSTANCE_TO_INSTANCEGROUP`` to find + Compute Engine instances that have relationships of + type "INSTANCE_TO_INSTANCEGROUP". + - ``relationships.INSTANCE_TO_INSTANCEGROUP:instance-group-1`` + to find Compute Engine instances that have + relationships with "instance-group-1" in the Compute + Engine instance group resource name, for relationship + type "INSTANCE_TO_INSTANCEGROUP". 
+ - ``state:ACTIVE`` to find Google Cloud resources whose + state contains "ACTIVE" as a word. + - ``NOT state:ACTIVE`` to find Google Cloud resources whose state doesn't contain "ACTIVE" as a word. - - ``createTime<1609459200`` to find Cloud resources - that were created before "2021-01-01 00:00:00 UTC". - 1609459200 is the epoch timestamp of "2021-01-01 - 00:00:00 UTC" in seconds. - - ``updateTime>1609459200`` to find Cloud resources - that were updated after "2021-01-01 00:00:00 UTC". - 1609459200 is the epoch timestamp of "2021-01-01 - 00:00:00 UTC" in seconds. - - ``Important`` to find Cloud resources that contain - "Important" as a word in any of the searchable - fields. - - ``Impor*`` to find Cloud resources that contain - "Impor" as a prefix of any word in any of the + - ``createTime<1609459200`` to find Google Cloud + resources that were created before "2021-01-01 + 00:00:00 UTC". 1609459200 is the epoch timestamp of + "2021-01-01 00:00:00 UTC" in seconds. + - ``updateTime>1609459200`` to find Google Cloud + resources that were updated after "2021-01-01 + 00:00:00 UTC". 1609459200 is the epoch timestamp of + "2021-01-01 00:00:00 UTC" in seconds. + - ``Important`` to find Google Cloud resources that + contain "Important" as a word in any of the + searchable fields. + - ``Impor*`` to find Google Cloud resources that + contain "Impor" as a prefix of any word in any of the searchable fields. - ``Important location:(us-west1 OR global)`` to find - Cloud resources that contain "Important" as a word in - any of the searchable fields and are also located in - the "us-west1" region or the "global" location. + Google Cloud resources that contain "Important" as a + word in any of the searchable fields and are also + located in the "us-west1" region or the "global" + location. This corresponds to the ``query`` field on the ``request`` instance; if ``request`` is provided, this @@ -1211,6 +1246,7 @@ async def sample_search_all_resources(): Returns: google.cloud.asset_v1.services.asset_service.pagers.SearchAllResourcesAsyncPager: Search all resources response. + Iterating over this object will yield results and resolve additional pages automatically. @@ -1346,12 +1382,12 @@ async def sample_search_all_iam_policies(): for more information. If not specified or empty, it will search all the IAM policies within the specified ``scope``. Note that the query string is compared - against each Cloud IAM policy binding, including its - members, roles, and Cloud IAM conditions. The returned - Cloud IAM policies will only contain the bindings that - match your query. To learn more about the IAM policy - structure, see `IAM policy - doc `__. + against each IAM policy binding, including its + principals, roles, and IAM conditions. The returned IAM + policies will only contain the bindings that match your + query. To learn more about the IAM policy structure, see + the `IAM policy + documentation `__. Examples: @@ -1392,7 +1428,7 @@ async def sample_search_all_iam_policies(): - ``roles:roles/compute.admin`` to find IAM policy bindings that specify the Compute Admin role. - ``memberTypes:user`` to find IAM policy bindings that - contain the "user" member type. + contain the principal type "user". This corresponds to the ``query`` field on the ``request`` instance; if ``request`` is provided, this @@ -1406,6 +1442,7 @@ async def sample_search_all_iam_policies(): Returns: google.cloud.asset_v1.services.asset_service.pagers.SearchAllIamPoliciesAsyncPager: Search all IAM policies response. 
+ Iterating over this object will yield results and resolve additional pages automatically. @@ -1523,7 +1560,7 @@ async def sample_analyze_iam_policy(): Returns: google.cloud.asset_v1.types.AnalyzeIamPolicyResponse: A response message for - [AssetService.AnalyzeIamPolicy][google.cloud.asset.v1.AssetService.AnalyzeIamPolicy]. + [AssetService.AnalyzeIamPolicy][google.cloud.asset.v1.AssetService.AnalyzeIamPolicy]. """ # Create or coerce a protobuf request object. @@ -1579,8 +1616,8 @@ async def analyze_iam_policy_longrunning(self, [google.longrunning.Operation][google.longrunning.Operation], which allows you to track the operation status. We recommend intervals of at least 2 seconds with exponential backoff retry - to poll the operation result. The metadata contains the request - to help callers to map responses to requests. + to poll the operation result. The metadata contains the metadata + for the long-running operation. .. code-block:: python @@ -1633,10 +1670,8 @@ async def sample_analyze_iam_policy_longrunning(): google.api_core.operation_async.AsyncOperation: An object representing a long-running operation. - The result type for the operation will be - :class:`google.cloud.asset_v1.types.AnalyzeIamPolicyLongrunningResponse` - A response message for - [AssetService.AnalyzeIamPolicyLongrunning][google.cloud.asset.v1.AssetService.AnalyzeIamPolicyLongrunning]. + The result type for the operation will be :class:`google.cloud.asset_v1.types.AnalyzeIamPolicyLongrunningResponse` A response message for + [AssetService.AnalyzeIamPolicyLongrunning][google.cloud.asset.v1.AssetService.AnalyzeIamPolicyLongrunning]. """ # Create or coerce a protobuf request object. @@ -1671,12 +1706,1365 @@ async def sample_analyze_iam_policy_longrunning(): response, self._client._transport.operations_client, asset_service.AnalyzeIamPolicyLongrunningResponse, - metadata_type=asset_service.AnalyzeIamPolicyLongrunningRequest, + metadata_type=asset_service.AnalyzeIamPolicyLongrunningMetadata, + ) + + # Done; return the response. + return response + + async def analyze_move(self, + request: Optional[Union[asset_service.AnalyzeMoveRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> asset_service.AnalyzeMoveResponse: + r"""Analyze moving a resource to a specified destination + without kicking off the actual move. The analysis is + best effort depending on the user's permissions of + viewing different hierarchical policies and + configurations. The policies and configuration are + subject to change before the actual resource migration + takes place. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import asset_v1 + + async def sample_analyze_move(): + # Create a client + client = asset_v1.AssetServiceAsyncClient() + + # Initialize request argument(s) + request = asset_v1.AnalyzeMoveRequest( + resource="resource_value", + destination_parent="destination_parent_value", + ) + + # Make the request + response = await client.analyze_move(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.asset_v1.types.AnalyzeMoveRequest, dict]]): + The request object. The request message for performing + resource move analysis. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.asset_v1.types.AnalyzeMoveResponse: + The response message for resource + move analysis. + + """ + # Create or coerce a protobuf request object. + request = asset_service.AnalyzeMoveRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.analyze_move, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("resource", request.resource), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def query_assets(self, + request: Optional[Union[asset_service.QueryAssetsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> asset_service.QueryAssetsResponse: + r"""Issue a job that queries assets using a SQL statement compatible + with `BigQuery Standard + SQL <http://cloud.google.com/bigquery/docs/reference/standard-sql/enabling-standard-sql>`__. + + If the query execution finishes within timeout and there's no + pagination, the full query results will be returned in the + ``QueryAssetsResponse``. + + Otherwise, full query results can be obtained by issuing extra + requests with the ``job_reference`` from a previous + ``QueryAssets`` call. + + Note: the query result has an approximately 10 GB limitation + enforced by BigQuery; see + https://cloud.google.com/bigquery/docs/best-practices-performance-output. + Queries that return larger results will result in errors. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import asset_v1 + + async def sample_query_assets(): + # Create a client + client = asset_v1.AssetServiceAsyncClient() + + # Initialize request argument(s) + request = asset_v1.QueryAssetsRequest( + statement="statement_value", + parent="parent_value", + ) + + # Make the request + response = await client.query_assets(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.asset_v1.types.QueryAssetsRequest, dict]]): + The request object. QueryAssets request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.asset_v1.types.QueryAssetsResponse: + QueryAssets response. + """ + # Create or coerce a protobuf request object. + request = asset_service.QueryAssetsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.query_assets, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_saved_query(self, + request: Optional[Union[asset_service.CreateSavedQueryRequest, dict]] = None, + *, + parent: Optional[str] = None, + saved_query: Optional[asset_service.SavedQuery] = None, + saved_query_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> asset_service.SavedQuery: + r"""Creates a saved query in a parent + project/folder/organization. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import asset_v1 + + async def sample_create_saved_query(): + # Create a client + client = asset_v1.AssetServiceAsyncClient() + + # Initialize request argument(s) + request = asset_v1.CreateSavedQueryRequest( + parent="parent_value", + saved_query_id="saved_query_id_value", + ) + + # Make the request + response = await client.create_saved_query(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.asset_v1.types.CreateSavedQueryRequest, dict]]): + The request object. Request to create a saved query. + parent (:class:`str`): + Required. The name of the project/folder/organization + where this saved_query should be created in. 
It can only + be an organization number (such as "organizations/123"), + a folder number (such as "folders/123"), a project ID + (such as "projects/my-project-id"), or a project number + (such as "projects/12345"). + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + saved_query (:class:`google.cloud.asset_v1.types.SavedQuery`): + Required. The saved_query details. The ``name`` field + must be empty as it will be generated based on the + parent and saved_query_id. + + This corresponds to the ``saved_query`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + saved_query_id (:class:`str`): + Required. The ID to use for the saved query, which must + be unique in the specified parent. It will become the + final component of the saved query's resource name. + + This value should be 4-63 characters, and valid + characters are ``[a-z][0-9]-``. + + Notice that this field is required in the saved query + creation, and the ``name`` field of the ``saved_query`` + will be ignored. + + This corresponds to the ``saved_query_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.asset_v1.types.SavedQuery: + A saved query which can be shared + with others or used later. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, saved_query, saved_query_id]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = asset_service.CreateSavedQueryRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if saved_query is not None: + request.saved_query = saved_query + if saved_query_id is not None: + request.saved_query_id = saved_query_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_saved_query, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, ) # Done; return the response. return response + async def get_saved_query(self, + request: Optional[Union[asset_service.GetSavedQueryRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> asset_service.SavedQuery: + r"""Gets details about a saved query. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import asset_v1 + + async def sample_get_saved_query(): + # Create a client + client = asset_v1.AssetServiceAsyncClient() + + # Initialize request argument(s) + request = asset_v1.GetSavedQueryRequest( + name="name_value", + ) + + # Make the request + response = await client.get_saved_query(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.asset_v1.types.GetSavedQueryRequest, dict]]): + The request object. Request to get a saved query. + name (:class:`str`): + Required. The name of the saved query and it must be in + the format of: + + - projects/project_number/savedQueries/saved_query_id + - folders/folder_number/savedQueries/saved_query_id + - organizations/organization_number/savedQueries/saved_query_id + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.asset_v1.types.SavedQuery: + A saved query which can be shared + with others or used later. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = asset_service.GetSavedQueryRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_saved_query, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_saved_queries(self, + request: Optional[Union[asset_service.ListSavedQueriesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListSavedQueriesAsyncPager: + r"""Lists all saved queries in a parent + project/folder/organization. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+        # - It may require specifying regional endpoints when creating the service
+        #   client as shown in:
+        #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+        from google.cloud import asset_v1
+
+        async def sample_list_saved_queries():
+            # Create a client
+            client = asset_v1.AssetServiceAsyncClient()
+
+            # Initialize request argument(s)
+            request = asset_v1.ListSavedQueriesRequest(
+                parent="parent_value",
+            )
+
+            # Make the request
+            page_result = await client.list_saved_queries(request=request)
+
+            # Handle the response
+            async for response in page_result:
+                print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.asset_v1.types.ListSavedQueriesRequest, dict]]):
+                The request object. Request to list saved queries.
+            parent (:class:`str`):
+                Required. The parent
+                project/folder/organization whose
+                savedQueries are to be listed. It can
+                only be a project/folder/organization
+                number (such as "folders/12345"), or a
+                project ID (such as
+                "projects/my-project-id").
+
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.asset_v1.services.asset_service.pagers.ListSavedQueriesAsyncPager:
+                Response of listing saved queries.
+
+                Iterating over this object will yield
+                results and resolve additional pages
+                automatically.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([parent])
+        if request is not None and has_flattened_params:
+            raise ValueError("If the `request` argument is set, then none of "
+                             "the individual field arguments should be set.")
+
+        request = asset_service.ListSavedQueriesRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if parent is not None:
+            request.parent = parent
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = gapic_v1.method_async.wrap_method(
+            self._client._transport.list_saved_queries,
+            default_timeout=None,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("parent", request.parent),
+            )),
+        )
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # This method is paged; wrap the response in a pager, which provides
+        # an `__aiter__` convenience method.
+        response = pagers.ListSavedQueriesAsyncPager(
+            method=rpc,
+            request=request,
+            response=response,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    async def update_saved_query(self,
+            request: Optional[Union[asset_service.UpdateSavedQueryRequest, dict]] = None,
+            *,
+            saved_query: Optional[asset_service.SavedQuery] = None,
+            update_mask: Optional[field_mask_pb2.FieldMask] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> asset_service.SavedQuery:
+        r"""Updates a saved query.
+
+        ..
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import asset_v1 + + async def sample_update_saved_query(): + # Create a client + client = asset_v1.AssetServiceAsyncClient() + + # Initialize request argument(s) + request = asset_v1.UpdateSavedQueryRequest( + ) + + # Make the request + response = await client.update_saved_query(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.asset_v1.types.UpdateSavedQueryRequest, dict]]): + The request object. Request to update a saved query. + saved_query (:class:`google.cloud.asset_v1.types.SavedQuery`): + Required. The saved query to update. + + The saved query's ``name`` field is used to identify the + one to update, which has format as below: + + - projects/project_number/savedQueries/saved_query_id + - folders/folder_number/savedQueries/saved_query_id + - organizations/organization_number/savedQueries/saved_query_id + + This corresponds to the ``saved_query`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. The list of fields to + update. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.asset_v1.types.SavedQuery: + A saved query which can be shared + with others or used later. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([saved_query, update_mask]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = asset_service.UpdateSavedQueryRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if saved_query is not None: + request.saved_query = saved_query + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_saved_query, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("saved_query.name", request.saved_query.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def delete_saved_query(self, + request: Optional[Union[asset_service.DeleteSavedQueryRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a saved query. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import asset_v1 + + async def sample_delete_saved_query(): + # Create a client + client = asset_v1.AssetServiceAsyncClient() + + # Initialize request argument(s) + request = asset_v1.DeleteSavedQueryRequest( + name="name_value", + ) + + # Make the request + await client.delete_saved_query(request=request) + + Args: + request (Optional[Union[google.cloud.asset_v1.types.DeleteSavedQueryRequest, dict]]): + The request object. Request to delete a saved query. + name (:class:`str`): + Required. The name of the saved query to delete. It must + be in the format of: + + - projects/project_number/savedQueries/saved_query_id + - folders/folder_number/savedQueries/saved_query_id + - organizations/organization_number/savedQueries/saved_query_id + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = asset_service.DeleteSavedQueryRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_saved_query, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def batch_get_effective_iam_policies(self, + request: Optional[Union[asset_service.BatchGetEffectiveIamPoliciesRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> asset_service.BatchGetEffectiveIamPoliciesResponse: + r"""Gets effective IAM policies for a batch of resources. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import asset_v1 + + async def sample_batch_get_effective_iam_policies(): + # Create a client + client = asset_v1.AssetServiceAsyncClient() + + # Initialize request argument(s) + request = asset_v1.BatchGetEffectiveIamPoliciesRequest( + scope="scope_value", + names=['names_value1', 'names_value2'], + ) + + # Make the request + response = await client.batch_get_effective_iam_policies(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.asset_v1.types.BatchGetEffectiveIamPoliciesRequest, dict]]): + The request object. A request message for + [AssetService.BatchGetEffectiveIamPolicies][google.cloud.asset.v1.AssetService.BatchGetEffectiveIamPolicies]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.asset_v1.types.BatchGetEffectiveIamPoliciesResponse: + A response message for + [AssetService.BatchGetEffectiveIamPolicies][google.cloud.asset.v1.AssetService.BatchGetEffectiveIamPolicies]. + + """ + # Create or coerce a protobuf request object. + request = asset_service.BatchGetEffectiveIamPoliciesRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.batch_get_effective_iam_policies, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("scope", request.scope), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def analyze_org_policies(self, + request: Optional[Union[asset_service.AnalyzeOrgPoliciesRequest, dict]] = None, + *, + scope: Optional[str] = None, + constraint: Optional[str] = None, + filter: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.AnalyzeOrgPoliciesAsyncPager: + r"""Analyzes organization policies under a scope. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+        # - It may require specifying regional endpoints when creating the service
+        #   client as shown in:
+        #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+        from google.cloud import asset_v1
+
+        async def sample_analyze_org_policies():
+            # Create a client
+            client = asset_v1.AssetServiceAsyncClient()
+
+            # Initialize request argument(s)
+            request = asset_v1.AnalyzeOrgPoliciesRequest(
+                scope="scope_value",
+                constraint="constraint_value",
+            )
+
+            # Make the request
+            page_result = await client.analyze_org_policies(request=request)
+
+            # Handle the response
+            async for response in page_result:
+                print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.asset_v1.types.AnalyzeOrgPoliciesRequest, dict]]):
+                The request object. A request message for
+                [AssetService.AnalyzeOrgPolicies][google.cloud.asset.v1.AssetService.AnalyzeOrgPolicies].
+            scope (:class:`str`):
+                Required. The organization to scope the request. Only
+                organization policies within the scope will be analyzed.
+
+                - organizations/{ORGANIZATION_NUMBER} (e.g.,
+                  "organizations/123456")
+
+                This corresponds to the ``scope`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            constraint (:class:`str`):
+                Required. The name of the constraint
+                to analyze organization policies for.
+                The response only contains analyzed
+                organization policies for the provided
+                constraint.
+
+                This corresponds to the ``constraint`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            filter (:class:`str`):
+                The expression to filter
+                [AnalyzeOrgPoliciesResponse.org_policy_results][google.cloud.asset.v1.AnalyzeOrgPoliciesResponse.org_policy_results].
+                The only supported field is
+                ``consolidated_policy.attached_resource``, and the only
+                supported operator is ``=``.
+
+                Example:
+                consolidated_policy.attached_resource="//cloudresourcemanager.googleapis.com/folders/001"
+                will return the org policy results of "folders/001".
+
+                This corresponds to the ``filter`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.asset_v1.services.asset_service.pagers.AnalyzeOrgPoliciesAsyncPager:
+                The response message for
+                [AssetService.AnalyzeOrgPolicies][google.cloud.asset.v1.AssetService.AnalyzeOrgPolicies].
+
+                Iterating over this object will yield results and
+                resolve additional pages automatically.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([scope, constraint, filter])
+        if request is not None and has_flattened_params:
+            raise ValueError("If the `request` argument is set, then none of "
+                             "the individual field arguments should be set.")
+
+        request = asset_service.AnalyzeOrgPoliciesRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if scope is not None:
+            request.scope = scope
+        if constraint is not None:
+            request.constraint = constraint
+        if filter is not None:
+            request.filter = filter
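+
+        # Note: as the check above enforces, the flattened arguments and
+        # ``request`` are mutually exclusive. A hedged sketch of the two
+        # equivalent call styles (values illustrative only):
+        #
+        #     pager = await client.analyze_org_policies(
+        #         scope="organizations/123456", constraint="constraint_value")
+        #     pager = await client.analyze_org_policies(request={
+        #         "scope": "organizations/123456",
+        #         "constraint": "constraint_value"})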
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = gapic_v1.method_async.wrap_method(
+            self._client._transport.analyze_org_policies,
+            default_timeout=None,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("scope", request.scope),
+            )),
+        )
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # This method is paged; wrap the response in a pager, which provides
+        # an `__aiter__` convenience method.
+        response = pagers.AnalyzeOrgPoliciesAsyncPager(
+            method=rpc,
+            request=request,
+            response=response,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    async def analyze_org_policy_governed_containers(self,
+            request: Optional[Union[asset_service.AnalyzeOrgPolicyGovernedContainersRequest, dict]] = None,
+            *,
+            scope: Optional[str] = None,
+            constraint: Optional[str] = None,
+            filter: Optional[str] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> pagers.AnalyzeOrgPolicyGovernedContainersAsyncPager:
+        r"""Analyzes organization policies governed containers
+        (projects, folders, or organizations) under a scope.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import asset_v1
+
+            async def sample_analyze_org_policy_governed_containers():
+                # Create a client
+                client = asset_v1.AssetServiceAsyncClient()
+
+                # Initialize request argument(s)
+                request = asset_v1.AnalyzeOrgPolicyGovernedContainersRequest(
+                    scope="scope_value",
+                    constraint="constraint_value",
+                )
+
+                # Make the request
+                page_result = await client.analyze_org_policy_governed_containers(request=request)
+
+                # Handle the response
+                async for response in page_result:
+                    print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.asset_v1.types.AnalyzeOrgPolicyGovernedContainersRequest, dict]]):
+                The request object. A request message for
+                [AssetService.AnalyzeOrgPolicyGovernedContainers][google.cloud.asset.v1.AssetService.AnalyzeOrgPolicyGovernedContainers].
+            scope (:class:`str`):
+                Required. The organization to scope the request. Only
+                organization policies within the scope will be analyzed.
+                The output containers will also be limited to the ones
+                governed by those in-scope organization policies.
+
+                - organizations/{ORGANIZATION_NUMBER} (e.g.,
+                  "organizations/123456")
+
+                This corresponds to the ``scope`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            constraint (:class:`str`):
+                Required. The name of the constraint
+                to analyze governed containers for. The
+                analysis only contains organization
+                policies for the provided constraint.
+
+                This corresponds to the ``constraint`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            filter (:class:`str`):
+                The expression to filter the governed containers in the
+                result. The only supported field is ``parent``, and the
+                only supported operator is ``=``.
+ + Example: + parent="//cloudresourcemanager.googleapis.com/folders/001" + will return all containers under "folders/001". + + This corresponds to the ``filter`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.asset_v1.services.asset_service.pagers.AnalyzeOrgPolicyGovernedContainersAsyncPager: + The response message for + [AssetService.AnalyzeOrgPolicyGovernedContainers][google.cloud.asset.v1.AssetService.AnalyzeOrgPolicyGovernedContainers]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([scope, constraint, filter]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = asset_service.AnalyzeOrgPolicyGovernedContainersRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if scope is not None: + request.scope = scope + if constraint is not None: + request.constraint = constraint + if filter is not None: + request.filter = filter + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.analyze_org_policy_governed_containers, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("scope", request.scope), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.AnalyzeOrgPolicyGovernedContainersAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def analyze_org_policy_governed_assets(self, + request: Optional[Union[asset_service.AnalyzeOrgPolicyGovernedAssetsRequest, dict]] = None, + *, + scope: Optional[str] = None, + constraint: Optional[str] = None, + filter: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.AnalyzeOrgPolicyGovernedAssetsAsyncPager: + r"""Analyzes organization policies governed assets (Google Cloud + resources or policies) under a scope. 
This RPC supports custom
+        constraints and the following 10 canned constraints:
+
+        - storage.uniformBucketLevelAccess
+        - iam.disableServiceAccountKeyCreation
+        - iam.allowedPolicyMemberDomains
+        - compute.vmExternalIpAccess
+        - appengine.enforceServiceAccountActAsCheck
+        - gcp.resourceLocations
+        - compute.trustedImageProjects
+        - compute.skipDefaultNetworkCreation
+        - compute.requireOsLogin
+        - compute.disableNestedVirtualization
+
+        This RPC only returns either resources of types supported by
+        `searchable asset
+        types `__,
+        or IAM policies.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import asset_v1
+
+            async def sample_analyze_org_policy_governed_assets():
+                # Create a client
+                client = asset_v1.AssetServiceAsyncClient()
+
+                # Initialize request argument(s)
+                request = asset_v1.AnalyzeOrgPolicyGovernedAssetsRequest(
+                    scope="scope_value",
+                    constraint="constraint_value",
+                )
+
+                # Make the request
+                page_result = await client.analyze_org_policy_governed_assets(request=request)
+
+                # Handle the response
+                async for response in page_result:
+                    print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.asset_v1.types.AnalyzeOrgPolicyGovernedAssetsRequest, dict]]):
+                The request object. A request message for
+                [AssetService.AnalyzeOrgPolicyGovernedAssets][google.cloud.asset.v1.AssetService.AnalyzeOrgPolicyGovernedAssets].
+            scope (:class:`str`):
+                Required. The organization to scope the request. Only
+                organization policies within the scope will be analyzed.
+                The output assets will also be limited to the ones
+                governed by those in-scope organization policies.
+
+                - organizations/{ORGANIZATION_NUMBER} (e.g.,
+                  "organizations/123456")
+
+                This corresponds to the ``scope`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            constraint (:class:`str`):
+                Required. The name of the constraint
+                to analyze governed assets for. The
+                analysis only contains analyzed
+                organization policies for the provided
+                constraint.
+
+                This corresponds to the ``constraint`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            filter (:class:`str`):
+                The expression to filter the governed assets in the
+                result. The only supported fields for governed resources
+                are ``governed_resource.project`` and
+                ``governed_resource.folders``. The only supported fields
+                for governed IAM policies are
+                ``governed_iam_policy.project`` and
+                ``governed_iam_policy.folders``. The only supported
+                operator is ``=``.
+
+                Example 1: governed_resource.project="projects/12345678"
+                filter will return all governed resources under
+                projects/12345678, including the project itself, if
+                applicable.
+
+                Example 2:
+                governed_iam_policy.folders="folders/12345678" filter
+                will return all governed IAM policies under
+                folders/12345678, if applicable.
+
+                This corresponds to the ``filter`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.asset_v1.services.asset_service.pagers.AnalyzeOrgPolicyGovernedAssetsAsyncPager: + The response message for + [AssetService.AnalyzeOrgPolicyGovernedAssets][google.cloud.asset.v1.AssetService.AnalyzeOrgPolicyGovernedAssets]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([scope, constraint, filter]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = asset_service.AnalyzeOrgPolicyGovernedAssetsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if scope is not None: + request.scope = scope + if constraint is not None: + request.constraint = constraint + if filter is not None: + request.filter = filter + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.analyze_org_policy_governed_assets, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("scope", request.scope), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.AnalyzeOrgPolicyGovernedAssetsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata(
+                (("name", request.name),)),
+        )
+
+        # Send the request.
+        response = await rpc(
+            request, retry=retry, timeout=timeout, metadata=metadata,)
+
+        # Done; return the response.
+        return response
+
     async def __aenter__(self) -> "AssetServiceAsyncClient":
         return self
 
diff --git a/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py b/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py
index aeccfe63ae..65f82d2fef 100755
--- a/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py
+++ b/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py
@@ -40,6 +40,10 @@
 from google.cloud.asset_v1.services.asset_service import pagers
 from google.cloud.asset_v1.types import asset_service
 from google.cloud.asset_v1.types import assets
+from google.longrunning import operations_pb2 # type: ignore
+from google.protobuf import field_mask_pb2 # type: ignore
+from google.protobuf import timestamp_pb2 # type: ignore
+from google.rpc import status_pb2 # type: ignore
 from google.type import expr_pb2 # type: ignore
 from .transports.base import AssetServiceTransport, DEFAULT_CLIENT_INFO
 from .transports.grpc import AssetServiceGrpcTransport
@@ -166,6 +170,28 @@ def transport(self) -> AssetServiceTransport:
         """
         return self._transport
 
+    @staticmethod
+    def access_level_path(access_policy: str,access_level: str,) -> str:
+        """Returns a fully-qualified access_level string."""
+        return "accessPolicies/{access_policy}/accessLevels/{access_level}".format(access_policy=access_policy, access_level=access_level, )
+
+    @staticmethod
+    def parse_access_level_path(path: str) -> Dict[str,str]:
+        """Parses a access_level path into its component segments."""
+        m = re.match(r"^accessPolicies/(?P<access_policy>.+?)/accessLevels/(?P<access_level>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def access_policy_path(access_policy: str,) -> str:
+        """Returns a fully-qualified access_policy string."""
+        return "accessPolicies/{access_policy}".format(access_policy=access_policy, )
+
+    @staticmethod
+    def parse_access_policy_path(path: str) -> Dict[str,str]:
+        """Parses a access_policy path into its component segments."""
+        m = re.match(r"^accessPolicies/(?P<access_policy>.+?)$", path)
+        return m.groupdict() if m else {}
+
     @staticmethod
     def asset_path() -> str:
         """Returns a fully-qualified asset string."""
@@ -188,6 +214,39 @@ def parse_feed_path(path: str) -> Dict[str,str]:
         m = re.match(r"^projects/(?P<project>.+?)/feeds/(?P<feed>.+?)$", path)
         return m.groupdict() if m else {}
 
+    @staticmethod
+    def inventory_path(project: str,location: str,instance: str,) -> str:
+        """Returns a fully-qualified inventory string."""
+        return "projects/{project}/locations/{location}/instances/{instance}/inventory".format(project=project, location=location, instance=instance, )
+
+    @staticmethod
+    def parse_inventory_path(path: str) -> Dict[str,str]:
+        """Parses a inventory path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/instances/(?P<instance>.+?)/inventory$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def saved_query_path(project: str,saved_query: str,) -> str:
+        """Returns a fully-qualified saved_query string."""
+        return "projects/{project}/savedQueries/{saved_query}".format(project=project, saved_query=saved_query, )
+
+    @staticmethod
+    def parse_saved_query_path(path: str) -> Dict[str,str]:
+        """Parses a saved_query path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/savedQueries/(?P<saved_query>.+?)$", path)
+        return m.groupdict() if m else {}
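+
+    # A hedged usage sketch of the path helpers above (values illustrative only):
+    #
+    #     path = AssetServiceClient.saved_query_path("my-project", "my-query")
+    #     # -> "projects/my-project/savedQueries/my-query"
+    #     AssetServiceClient.parse_saved_query_path(path)
+    #     # -> {"project": "my-project", "saved_query": "my-query"}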
+
+    @staticmethod
+    def service_perimeter_path(access_policy: str,service_perimeter: str,) -> str:
+        """Returns a fully-qualified service_perimeter string."""
+        return "accessPolicies/{access_policy}/servicePerimeters/{service_perimeter}".format(access_policy=access_policy, service_perimeter=service_perimeter, )
+
+    @staticmethod
+    def parse_service_perimeter_path(path: str) -> Dict[str,str]:
+        """Parses a service_perimeter path into its component segments."""
+        m = re.match(r"^accessPolicies/(?P<access_policy>.+?)/servicePerimeters/(?P<service_perimeter>.+?)$", path)
+        return m.groupdict() if m else {}
+
     @staticmethod
     def common_billing_account_path(billing_account: str, ) -> str:
         """Returns a fully-qualified billing_account string."""
@@ -406,10 +465,10 @@ def export_assets(self,
         line represents a
         [google.cloud.asset.v1.Asset][google.cloud.asset.v1.Asset] in
         the JSON format; for BigQuery table destinations, the output
-        table stores the fields in asset proto as columns. This API
+        table stores the fields in asset Protobuf as columns. This API
         implements the
-        [google.longrunning.Operation][google.longrunning.Operation] API
-        , which allows you to keep track of the export. We recommend
+        [google.longrunning.Operation][google.longrunning.Operation]
+        API, which allows you to keep track of the export. We recommend
         intervals of at least 2 seconds with exponential retry to poll
         the export operation result. For regular-size resource parent,
         the export operation usually finishes within 5 minutes.
@@ -549,11 +608,13 @@ def sample_list_assets():
             request (Union[google.cloud.asset_v1.types.ListAssetsRequest, dict]):
                 The request object. ListAssets request.
             parent (str):
-                Required. Name of the organization or project the assets
-                belong to. Format: "organizations/[organization-number]"
-                (such as "organizations/123"), "projects/[project-id]"
-                (such as "projects/my-project-id"), or
-                "projects/[project-number]" (such as "projects/12345").
+                Required. Name of the organization, folder, or project
+                the assets belong to. Format:
+                "organizations/[organization-number]" (such as
+                "organizations/123"), "projects/[project-id]" (such as
+                "projects/my-project-id"), "projects/[project-number]"
+                (such as "projects/12345"), or "folders/[folder-number]"
+                (such as "folders/12345").
 
                 This corresponds to the ``parent`` field
                 on the ``request`` instance; if ``request`` is provided, this
@@ -567,6 +628,7 @@ def sample_list_assets():
         Returns:
             google.cloud.asset_v1.services.asset_service.pagers.ListAssetsPager:
                 ListAssets response.
+
                 Iterating over this object will yield results and
                 resolve additional pages automatically.
 
@@ -761,8 +823,8 @@ def sample_create_feed():
                 be an organization number (such as
                 "organizations/123"), a folder number
                 (such as "folders/123"), a project ID
-                (such as "projects/my-project-id")", or
-                a project number (such as
+                (such as "projects/my-project-id"), or a
+                project number (such as
                 "projects/12345").
 
                 This corresponds to the ``parent`` field
@@ -1251,10 +1313,10 @@ def search_all_resources(self,
             timeout: Union[float, object] = gapic_v1.method.DEFAULT,
             metadata: Sequence[Tuple[str, str]] = (),
             ) -> pagers.SearchAllResourcesPager:
-        r"""Searches all Cloud resources within the specified scope, such as
-        a project, folder, or organization. The caller must be granted
-        the ``cloudasset.assets.searchAllResources`` permission on the
-        desired scope, otherwise the request will be rejected.
+ r"""Searches all Google Cloud resources within the specified scope, + such as a project, folder, or organization. The caller must be + granted the ``cloudasset.assets.searchAllResources`` permission + on the desired scope, otherwise the request will be rejected. .. code-block:: python @@ -1312,46 +1374,64 @@ def sample_search_all_resources(): Examples: - - ``name:Important`` to find Cloud resources whose name - contains "Important" as a word. - - ``name=Important`` to find the Cloud resource whose - name is exactly "Important". - - ``displayName:Impor*`` to find Cloud resources whose - display name contains "Impor" as a prefix of any word - in the field. - - ``location:us-west*`` to find Cloud resources whose - location contains both "us" and "west" as prefixes. - - ``labels:prod`` to find Cloud resources whose labels - contain "prod" as a key or value. - - ``labels.env:prod`` to find Cloud resources that have - a label "env" and its value is "prod". - - ``labels.env:*`` to find Cloud resources that have a - label "env". - - ``kmsKey:key`` to find Cloud resources encrypted with - a customer-managed encryption key whose name contains - the word "key". - - ``state:ACTIVE`` to find Cloud resources whose state - contains "ACTIVE" as a word. - - ``NOT state:ACTIVE`` to find {{gcp_name}} resources + - ``name:Important`` to find Google Cloud resources + whose name contains "Important" as a word. + - ``name=Important`` to find the Google Cloud resource + whose name is exactly "Important". + - ``displayName:Impor*`` to find Google Cloud resources + whose display name contains "Impor" as a prefix of + any word in the field. + - ``location:us-west*`` to find Google Cloud resources + whose location contains both "us" and "west" as + prefixes. + - ``labels:prod`` to find Google Cloud resources whose + labels contain "prod" as a key or value. + - ``labels.env:prod`` to find Google Cloud resources + that have a label "env" and its value is "prod". + - ``labels.env:*`` to find Google Cloud resources that + have a label "env". + - ``kmsKey:key`` to find Google Cloud resources + encrypted with a customer-managed encryption key + whose name contains "key" as a word. This field is + deprecated. Please use the ``kmsKeys`` field to + retrieve Cloud KMS key information. + - ``kmsKeys:key`` to find Google Cloud resources + encrypted with customer-managed encryption keys whose + name contains the word "key". + - ``relationships:instance-group-1`` to find Google + Cloud resources that have relationships with + "instance-group-1" in the related resource name. + - ``relationships:INSTANCE_TO_INSTANCEGROUP`` to find + Compute Engine instances that have relationships of + type "INSTANCE_TO_INSTANCEGROUP". + - ``relationships.INSTANCE_TO_INSTANCEGROUP:instance-group-1`` + to find Compute Engine instances that have + relationships with "instance-group-1" in the Compute + Engine instance group resource name, for relationship + type "INSTANCE_TO_INSTANCEGROUP". + - ``state:ACTIVE`` to find Google Cloud resources whose + state contains "ACTIVE" as a word. + - ``NOT state:ACTIVE`` to find Google Cloud resources whose state doesn't contain "ACTIVE" as a word. - - ``createTime<1609459200`` to find Cloud resources - that were created before "2021-01-01 00:00:00 UTC". - 1609459200 is the epoch timestamp of "2021-01-01 - 00:00:00 UTC" in seconds. - - ``updateTime>1609459200`` to find Cloud resources - that were updated after "2021-01-01 00:00:00 UTC". 
- 1609459200 is the epoch timestamp of "2021-01-01 - 00:00:00 UTC" in seconds. - - ``Important`` to find Cloud resources that contain - "Important" as a word in any of the searchable - fields. - - ``Impor*`` to find Cloud resources that contain - "Impor" as a prefix of any word in any of the + - ``createTime<1609459200`` to find Google Cloud + resources that were created before "2021-01-01 + 00:00:00 UTC". 1609459200 is the epoch timestamp of + "2021-01-01 00:00:00 UTC" in seconds. + - ``updateTime>1609459200`` to find Google Cloud + resources that were updated after "2021-01-01 + 00:00:00 UTC". 1609459200 is the epoch timestamp of + "2021-01-01 00:00:00 UTC" in seconds. + - ``Important`` to find Google Cloud resources that + contain "Important" as a word in any of the + searchable fields. + - ``Impor*`` to find Google Cloud resources that + contain "Impor" as a prefix of any word in any of the searchable fields. - ``Important location:(us-west1 OR global)`` to find - Cloud resources that contain "Important" as a word in - any of the searchable fields and are also located in - the "us-west1" region or the "global" location. + Google Cloud resources that contain "Important" as a + word in any of the searchable fields and are also + located in the "us-west1" region or the "global" + location. This corresponds to the ``query`` field on the ``request`` instance; if ``request`` is provided, this @@ -1388,6 +1468,7 @@ def sample_search_all_resources(): Returns: google.cloud.asset_v1.services.asset_service.pagers.SearchAllResourcesPager: Search all resources response. + Iterating over this object will yield results and resolve additional pages automatically. @@ -1516,12 +1597,12 @@ def sample_search_all_iam_policies(): for more information. If not specified or empty, it will search all the IAM policies within the specified ``scope``. Note that the query string is compared - against each Cloud IAM policy binding, including its - members, roles, and Cloud IAM conditions. The returned - Cloud IAM policies will only contain the bindings that - match your query. To learn more about the IAM policy - structure, see `IAM policy - doc `__. + against each IAM policy binding, including its + principals, roles, and IAM conditions. The returned IAM + policies will only contain the bindings that match your + query. To learn more about the IAM policy structure, see + the `IAM policy + documentation `__. Examples: @@ -1562,7 +1643,7 @@ def sample_search_all_iam_policies(): - ``roles:roles/compute.admin`` to find IAM policy bindings that specify the Compute Admin role. - ``memberTypes:user`` to find IAM policy bindings that - contain the "user" member type. + contain the principal type "user". This corresponds to the ``query`` field on the ``request`` instance; if ``request`` is provided, this @@ -1576,6 +1657,7 @@ def sample_search_all_iam_policies(): Returns: google.cloud.asset_v1.services.asset_service.pagers.SearchAllIamPoliciesPager: Search all IAM policies response. + Iterating over this object will yield results and resolve additional pages automatically. @@ -1686,7 +1768,7 @@ def sample_analyze_iam_policy(): Returns: google.cloud.asset_v1.types.AnalyzeIamPolicyResponse: A response message for - [AssetService.AnalyzeIamPolicy][google.cloud.asset.v1.AssetService.AnalyzeIamPolicy]. + [AssetService.AnalyzeIamPolicy][google.cloud.asset.v1.AssetService.AnalyzeIamPolicy]. """ # Create or coerce a protobuf request object. 
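
For context on the hunk below: ``analyze_iam_policy_longrunning`` returns a
``google.api_core.operation.Operation``, and the polling with exponential backoff
that its docstring recommends is roughly what ``Operation.result()`` already does
internally. A minimal sketch, with an illustrative request not taken from this diff:

.. code-block:: python

    from google.cloud import asset_v1

    client = asset_v1.AssetServiceClient()
    operation = client.analyze_iam_policy_longrunning(request={
        "analysis_query": {"scope": "projects/my-project-id"},
        "output_config": {"gcs_destination": {"uri": "gs://my-bucket/analysis.json"}},
    })
    # Blocks, polling the long-running operation until it completes
    # or the timeout lapses.
    response = operation.result(timeout=300)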
@@ -1737,8 +1819,8 @@ def analyze_iam_policy_longrunning(self, [google.longrunning.Operation][google.longrunning.Operation], which allows you to track the operation status. We recommend intervals of at least 2 seconds with exponential backoff retry - to poll the operation result. The metadata contains the request - to help callers to map responses to requests. + to poll the operation result. The metadata contains the metadata + for the long-running operation. .. code-block:: python @@ -1791,10 +1873,8 @@ def sample_analyze_iam_policy_longrunning(): google.api_core.operation.Operation: An object representing a long-running operation. - The result type for the operation will be - :class:`google.cloud.asset_v1.types.AnalyzeIamPolicyLongrunningResponse` - A response message for - [AssetService.AnalyzeIamPolicyLongrunning][google.cloud.asset.v1.AssetService.AnalyzeIamPolicyLongrunning]. + The result type for the operation will be :class:`google.cloud.asset_v1.types.AnalyzeIamPolicyLongrunningResponse` A response message for + [AssetService.AnalyzeIamPolicyLongrunning][google.cloud.asset.v1.AssetService.AnalyzeIamPolicyLongrunning]. """ # Create or coerce a protobuf request object. @@ -1830,24 +1910,1382 @@ def sample_analyze_iam_policy_longrunning(): response, self._transport.operations_client, asset_service.AnalyzeIamPolicyLongrunningResponse, - metadata_type=asset_service.AnalyzeIamPolicyLongrunningRequest, + metadata_type=asset_service.AnalyzeIamPolicyLongrunningMetadata, ) # Done; return the response. return response - def __enter__(self) -> "AssetServiceClient": - return self + def analyze_move(self, + request: Optional[Union[asset_service.AnalyzeMoveRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> asset_service.AnalyzeMoveResponse: + r"""Analyze moving a resource to a specified destination + without kicking off the actual move. The analysis is + best effort depending on the user's permissions of + viewing different hierarchical policies and + configurations. The policies and configuration are + subject to change before the actual resource migration + takes place. - def __exit__(self, type, value, traceback): - """Releases underlying transport's resources. + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import asset_v1 + + def sample_analyze_move(): + # Create a client + client = asset_v1.AssetServiceClient() + + # Initialize request argument(s) + request = asset_v1.AnalyzeMoveRequest( + resource="resource_value", + destination_parent="destination_parent_value", + ) + + # Make the request + response = client.analyze_move(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.asset_v1.types.AnalyzeMoveRequest, dict]): + The request object. The request message for performing + resource move analysis. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.asset_v1.types.AnalyzeMoveResponse:
+                The response message for resource
+                move analysis.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Minor optimization to avoid making a copy if the user passes
+        # in a asset_service.AnalyzeMoveRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, asset_service.AnalyzeMoveRequest):
+            request = asset_service.AnalyzeMoveRequest(request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.analyze_move]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("resource", request.resource),
+            )),
+        )
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def query_assets(self,
+            request: Optional[Union[asset_service.QueryAssetsRequest, dict]] = None,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> asset_service.QueryAssetsResponse:
+        r"""Issue a job that queries assets using a SQL statement compatible
+        with `BigQuery Standard
+        SQL `__.
+
+        If the query execution finishes within timeout and there's no
+        pagination, the full query results will be returned in the
+        ``QueryAssetsResponse``.
+
+        Otherwise, full query results can be obtained by issuing extra
+        requests with the ``job_reference`` from a previous
+        ``QueryAssets`` call.
+
+        Note, the query result has an approximately 10 GB size limitation
+        enforced by BigQuery
+        (https://cloud.google.com/bigquery/docs/best-practices-performance-output);
+        queries that return larger results will result in errors.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import asset_v1
+
+            def sample_query_assets():
+                # Create a client
+                client = asset_v1.AssetServiceClient()
+
+                # Initialize request argument(s)
+                request = asset_v1.QueryAssetsRequest(
+                    statement="statement_value",
+                    parent="parent_value",
+                )
+
+                # Make the request
+                response = client.query_assets(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.cloud.asset_v1.types.QueryAssetsRequest, dict]):
+                The request object. QueryAssets request.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.asset_v1.types.QueryAssetsResponse:
+                QueryAssets response.
+        """
+        # Create or coerce a protobuf request object.
+        # Minor optimization to avoid making a copy if the user passes
+        # in a asset_service.QueryAssetsRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, asset_service.QueryAssetsRequest):
+            request = asset_service.QueryAssetsRequest(request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.query_assets]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("parent", request.parent),
+            )),
+        )
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
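+
+    # A hedged sketch (not generated code) of the follow-up pattern the docstring
+    # above describes: when the first call does not finish within the timeout,
+    # issue another request with the returned ``job_reference``. Field names are
+    # from QueryAssetsRequest/QueryAssetsResponse; values are illustrative only.
+    #
+    #     first = client.query_assets(request={
+    #         "statement": "SELECT * FROM compute_googleapis_com_Instance",
+    #         "parent": "projects/my-project-id",
+    #     })
+    #     if not first.done:
+    #         again = client.query_assets(request={
+    #             "job_reference": first.job_reference,
+    #             "parent": "projects/my-project-id",
+    #         })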
+
+        Returns:
+            google.cloud.asset_v1.types.QueryAssetsResponse:
+                QueryAssets response.
+        """
+        # Create or coerce a protobuf request object.
+        # Minor optimization to avoid making a copy if the user passes
+        # in a asset_service.QueryAssetsRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, asset_service.QueryAssetsRequest):
+            request = asset_service.QueryAssetsRequest(request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.query_assets]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("parent", request.parent),
+            )),
+        )
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def create_saved_query(self,
+            request: Optional[Union[asset_service.CreateSavedQueryRequest, dict]] = None,
+            *,
+            parent: Optional[str] = None,
+            saved_query: Optional[asset_service.SavedQuery] = None,
+            saved_query_id: Optional[str] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> asset_service.SavedQuery:
+        r"""Creates a saved query in a parent
+        project/folder/organization.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import asset_v1
+
+            def sample_create_saved_query():
+                # Create a client
+                client = asset_v1.AssetServiceClient()
+
+                # Initialize request argument(s)
+                request = asset_v1.CreateSavedQueryRequest(
+                    parent="parent_value",
+                    saved_query_id="saved_query_id_value",
+                )
+
+                # Make the request
+                response = client.create_saved_query(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.cloud.asset_v1.types.CreateSavedQueryRequest, dict]):
+                The request object. Request to create a saved query.
+            parent (str):
+                Required. The name of the project/folder/organization
+                in which this saved_query should be created. It can only
+                be an organization number (such as "organizations/123"),
+                a folder number (such as "folders/123"), a project ID
+                (such as "projects/my-project-id"), or a project number
+                (such as "projects/12345").
+
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            saved_query (google.cloud.asset_v1.types.SavedQuery):
+                Required. The saved_query details. The ``name`` field
+                must be empty as it will be generated based on the
+                parent and saved_query_id.
+
+                This corresponds to the ``saved_query`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            saved_query_id (str):
+                Required. The ID to use for the saved query, which must
+                be unique in the specified parent. It will become the
+                final component of the saved query's resource name.
+ + This value should be 4-63 characters, and valid + characters are ``[a-z][0-9]-``. + + Notice that this field is required in the saved query + creation, and the ``name`` field of the ``saved_query`` + will be ignored. + + This corresponds to the ``saved_query_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.asset_v1.types.SavedQuery: + A saved query which can be shared + with others or used later. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, saved_query, saved_query_id]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a asset_service.CreateSavedQueryRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, asset_service.CreateSavedQueryRequest): + request = asset_service.CreateSavedQueryRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if saved_query is not None: + request.saved_query = saved_query + if saved_query_id is not None: + request.saved_query_id = saved_query_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_saved_query] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_saved_query(self, + request: Optional[Union[asset_service.GetSavedQueryRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> asset_service.SavedQuery: + r"""Gets details about a saved query. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import asset_v1 + + def sample_get_saved_query(): + # Create a client + client = asset_v1.AssetServiceClient() + + # Initialize request argument(s) + request = asset_v1.GetSavedQueryRequest( + name="name_value", + ) + + # Make the request + response = client.get_saved_query(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.asset_v1.types.GetSavedQueryRequest, dict]): + The request object. Request to get a saved query. + name (str): + Required. The name of the saved query and it must be in + the format of: + + - projects/project_number/savedQueries/saved_query_id + - folders/folder_number/savedQueries/saved_query_id + - organizations/organization_number/savedQueries/saved_query_id + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.asset_v1.types.SavedQuery: + A saved query which can be shared + with others or used later. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a asset_service.GetSavedQueryRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, asset_service.GetSavedQueryRequest): + request = asset_service.GetSavedQueryRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_saved_query] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_saved_queries(self, + request: Optional[Union[asset_service.ListSavedQueriesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListSavedQueriesPager: + r"""Lists all saved queries in a parent + project/folder/organization. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import asset_v1
+
+            def sample_list_saved_queries():
+                # Create a client
+                client = asset_v1.AssetServiceClient()
+
+                # Initialize request argument(s)
+                request = asset_v1.ListSavedQueriesRequest(
+                    parent="parent_value",
+                )
+
+                # Make the request
+                page_result = client.list_saved_queries(request=request)
+
+                # Handle the response
+                for response in page_result:
+                    print(response)
+
+        Args:
+            request (Union[google.cloud.asset_v1.types.ListSavedQueriesRequest, dict]):
+                The request object. Request to list saved queries.
+            parent (str):
+                Required. The parent
+                project/folder/organization whose
+                savedQueries are to be listed. It can
+                only be a
+                project/folder/organization number (such
+                as "folders/12345"), or a project ID
+                (such as "projects/my-project-id").
+
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.asset_v1.services.asset_service.pagers.ListSavedQueriesPager:
+                Response of listing saved queries.
+
+                Iterating over this object will yield
+                results and resolve additional pages
+                automatically.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([parent])
+        if request is not None and has_flattened_params:
+            raise ValueError('If the `request` argument is set, then none of '
+                             'the individual field arguments should be set.')
+
+        # Minor optimization to avoid making a copy if the user passes
+        # in a asset_service.ListSavedQueriesRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, asset_service.ListSavedQueriesRequest):
+            request = asset_service.ListSavedQueriesRequest(request)
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if parent is not None:
+            request.parent = parent
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.list_saved_queries]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("parent", request.parent),
+            )),
+        )
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # This method is paged; wrap the response in a pager, which provides
+        # an `__iter__` convenience method.
+        response = pagers.ListSavedQueriesPager(
+            method=rpc,
+            request=request,
+            response=response,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+ return response + + def update_saved_query(self, + request: Optional[Union[asset_service.UpdateSavedQueryRequest, dict]] = None, + *, + saved_query: Optional[asset_service.SavedQuery] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> asset_service.SavedQuery: + r"""Updates a saved query. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import asset_v1 + + def sample_update_saved_query(): + # Create a client + client = asset_v1.AssetServiceClient() + + # Initialize request argument(s) + request = asset_v1.UpdateSavedQueryRequest( + ) + + # Make the request + response = client.update_saved_query(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.asset_v1.types.UpdateSavedQueryRequest, dict]): + The request object. Request to update a saved query. + saved_query (google.cloud.asset_v1.types.SavedQuery): + Required. The saved query to update. + + The saved query's ``name`` field is used to identify the + one to update, which has format as below: + + - projects/project_number/savedQueries/saved_query_id + - folders/folder_number/savedQueries/saved_query_id + - organizations/organization_number/savedQueries/saved_query_id + + This corresponds to the ``saved_query`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. The list of fields to + update. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.asset_v1.types.SavedQuery: + A saved query which can be shared + with others or used later. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([saved_query, update_mask]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a asset_service.UpdateSavedQueryRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, asset_service.UpdateSavedQueryRequest): + request = asset_service.UpdateSavedQueryRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
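+        # e.g. (illustrative, assuming caller-side `query` and `mask` values):
+        #     client.update_saved_query(saved_query=query, update_mask=mask)
+        # builds the same request as passing a fully-populated
+        #     asset_service.UpdateSavedQueryRequest(saved_query=query, update_mask=mask)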
+ if saved_query is not None: + request.saved_query = saved_query + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_saved_query] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("saved_query.name", request.saved_query.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_saved_query(self, + request: Optional[Union[asset_service.DeleteSavedQueryRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a saved query. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import asset_v1 + + def sample_delete_saved_query(): + # Create a client + client = asset_v1.AssetServiceClient() + + # Initialize request argument(s) + request = asset_v1.DeleteSavedQueryRequest( + name="name_value", + ) + + # Make the request + client.delete_saved_query(request=request) + + Args: + request (Union[google.cloud.asset_v1.types.DeleteSavedQueryRequest, dict]): + The request object. Request to delete a saved query. + name (str): + Required. The name of the saved query to delete. It must + be in the format of: + + - projects/project_number/savedQueries/saved_query_id + - folders/folder_number/savedQueries/saved_query_id + - organizations/organization_number/savedQueries/saved_query_id + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a asset_service.DeleteSavedQueryRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, asset_service.DeleteSavedQueryRequest): + request = asset_service.DeleteSavedQueryRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
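+        # The wrapped callable comes from _prep_wrapped_messages in
+        # transports/base.py (also in this patch), where delete_saved_query is
+        # registered via gapic_v1.method.wrap_method(default_timeout=None), so
+        # any retry/timeout passed to this method overrides that default.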
+ rpc = self._transport._wrapped_methods[self._transport.delete_saved_query] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def batch_get_effective_iam_policies(self, + request: Optional[Union[asset_service.BatchGetEffectiveIamPoliciesRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> asset_service.BatchGetEffectiveIamPoliciesResponse: + r"""Gets effective IAM policies for a batch of resources. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import asset_v1 + + def sample_batch_get_effective_iam_policies(): + # Create a client + client = asset_v1.AssetServiceClient() + + # Initialize request argument(s) + request = asset_v1.BatchGetEffectiveIamPoliciesRequest( + scope="scope_value", + names=['names_value1', 'names_value2'], + ) + + # Make the request + response = client.batch_get_effective_iam_policies(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.asset_v1.types.BatchGetEffectiveIamPoliciesRequest, dict]): + The request object. A request message for + [AssetService.BatchGetEffectiveIamPolicies][google.cloud.asset.v1.AssetService.BatchGetEffectiveIamPolicies]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.asset_v1.types.BatchGetEffectiveIamPoliciesResponse: + A response message for + [AssetService.BatchGetEffectiveIamPolicies][google.cloud.asset.v1.AssetService.BatchGetEffectiveIamPolicies]. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a asset_service.BatchGetEffectiveIamPoliciesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, asset_service.BatchGetEffectiveIamPoliciesRequest): + request = asset_service.BatchGetEffectiveIamPoliciesRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.batch_get_effective_iam_policies] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("scope", request.scope), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+        return response
+
+    def analyze_org_policies(self,
+            request: Optional[Union[asset_service.AnalyzeOrgPoliciesRequest, dict]] = None,
+            *,
+            scope: Optional[str] = None,
+            constraint: Optional[str] = None,
+            filter: Optional[str] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> pagers.AnalyzeOrgPoliciesPager:
+        r"""Analyzes organization policies under a scope.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import asset_v1
+
+            def sample_analyze_org_policies():
+                # Create a client
+                client = asset_v1.AssetServiceClient()
+
+                # Initialize request argument(s)
+                request = asset_v1.AnalyzeOrgPoliciesRequest(
+                    scope="scope_value",
+                    constraint="constraint_value",
+                )
+
+                # Make the request
+                page_result = client.analyze_org_policies(request=request)
+
+                # Handle the response
+                for response in page_result:
+                    print(response)
+
+        Args:
+            request (Union[google.cloud.asset_v1.types.AnalyzeOrgPoliciesRequest, dict]):
+                The request object. A request message for
+                [AssetService.AnalyzeOrgPolicies][google.cloud.asset.v1.AssetService.AnalyzeOrgPolicies].
+            scope (str):
+                Required. The organization to scope the request. Only
+                organization policies within the scope will be analyzed.
+
+                - organizations/{ORGANIZATION_NUMBER} (e.g.,
+                  "organizations/123456")
+
+                This corresponds to the ``scope`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            constraint (str):
+                Required. The name of the constraint
+                to analyze organization policies for.
+                The response only contains analyzed
+                organization policies for the provided
+                constraint.
+
+                This corresponds to the ``constraint`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            filter (str):
+                The expression to filter
+                [AnalyzeOrgPoliciesResponse.org_policy_results][google.cloud.asset.v1.AnalyzeOrgPoliciesResponse.org_policy_results].
+                The only supported field is
+                ``consolidated_policy.attached_resource``, and the only
+                supported operator is ``=``.
+
+                Example:
+                consolidated_policy.attached_resource="//cloudresourcemanager.googleapis.com/folders/001"
+                will return the org policy results of "folders/001".
+
+                This corresponds to the ``filter`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.asset_v1.services.asset_service.pagers.AnalyzeOrgPoliciesPager:
+                The response message for
+                [AssetService.AnalyzeOrgPolicies][google.cloud.asset.v1.AssetService.AnalyzeOrgPolicies].
+
+                Iterating over this object will yield results and
+                resolve additional pages automatically.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
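+        # e.g. (illustrative): calling
+        #     client.analyze_org_policies(request=req, scope="organizations/123")
+        # raises the ValueError below, because `scope` duplicates a field the
+        # caller-supplied `req` may already carry.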
+ has_flattened_params = any([scope, constraint, filter]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a asset_service.AnalyzeOrgPoliciesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, asset_service.AnalyzeOrgPoliciesRequest): + request = asset_service.AnalyzeOrgPoliciesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if scope is not None: + request.scope = scope + if constraint is not None: + request.constraint = constraint + if filter is not None: + request.filter = filter + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.analyze_org_policies] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("scope", request.scope), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.AnalyzeOrgPoliciesPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def analyze_org_policy_governed_containers(self, + request: Optional[Union[asset_service.AnalyzeOrgPolicyGovernedContainersRequest, dict]] = None, + *, + scope: Optional[str] = None, + constraint: Optional[str] = None, + filter: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.AnalyzeOrgPolicyGovernedContainersPager: + r"""Analyzes organization policies governed containers + (projects, folders or organization) under a scope. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import asset_v1 + + def sample_analyze_org_policy_governed_containers(): + # Create a client + client = asset_v1.AssetServiceClient() + + # Initialize request argument(s) + request = asset_v1.AnalyzeOrgPolicyGovernedContainersRequest( + scope="scope_value", + constraint="constraint_value", + ) + + # Make the request + page_result = client.analyze_org_policy_governed_containers(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.asset_v1.types.AnalyzeOrgPolicyGovernedContainersRequest, dict]): + The request object. A request message for + [AssetService.AnalyzeOrgPolicyGovernedContainers][google.cloud.asset.v1.AssetService.AnalyzeOrgPolicyGovernedContainers]. + scope (str): + Required. The organization to scope the request. Only + organization policies within the scope will be analyzed. 
+ The output containers will also be limited to the ones + governed by those in-scope organization policies. + + - organizations/{ORGANIZATION_NUMBER} (e.g., + "organizations/123456") + + This corresponds to the ``scope`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + constraint (str): + Required. The name of the constraint + to analyze governed containers for. The + analysis only contains organization + policies for the provided constraint. + + This corresponds to the ``constraint`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + filter (str): + The expression to filter the governed containers in + result. The only supported field is ``parent``, and the + only supported operator is ``=``. + + Example: + parent="//cloudresourcemanager.googleapis.com/folders/001" + will return all containers under "folders/001". + + This corresponds to the ``filter`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.asset_v1.services.asset_service.pagers.AnalyzeOrgPolicyGovernedContainersPager: + The response message for + [AssetService.AnalyzeOrgPolicyGovernedContainers][google.cloud.asset.v1.AssetService.AnalyzeOrgPolicyGovernedContainers]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([scope, constraint, filter]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a asset_service.AnalyzeOrgPolicyGovernedContainersRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, asset_service.AnalyzeOrgPolicyGovernedContainersRequest): + request = asset_service.AnalyzeOrgPolicyGovernedContainersRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if scope is not None: + request.scope = scope + if constraint is not None: + request.constraint = constraint + if filter is not None: + request.filter = filter + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.analyze_org_policy_governed_containers] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("scope", request.scope), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.AnalyzeOrgPolicyGovernedContainersPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def analyze_org_policy_governed_assets(self, + request: Optional[Union[asset_service.AnalyzeOrgPolicyGovernedAssetsRequest, dict]] = None, + *, + scope: Optional[str] = None, + constraint: Optional[str] = None, + filter: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.AnalyzeOrgPolicyGovernedAssetsPager: + r"""Analyzes organization policies governed assets (Google Cloud + resources or policies) under a scope. This RPC supports custom + constraints and the following 10 canned constraints: + + - storage.uniformBucketLevelAccess + - iam.disableServiceAccountKeyCreation + - iam.allowedPolicyMemberDomains + - compute.vmExternalIpAccess + - appengine.enforceServiceAccountActAsCheck + - gcp.resourceLocations + - compute.trustedImageProjects + - compute.skipDefaultNetworkCreation + - compute.requireOsLogin + - compute.disableNestedVirtualization + + This RPC only returns either resources of types supported by + `searchable asset + types `__, + or IAM policies. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import asset_v1 + + def sample_analyze_org_policy_governed_assets(): + # Create a client + client = asset_v1.AssetServiceClient() + + # Initialize request argument(s) + request = asset_v1.AnalyzeOrgPolicyGovernedAssetsRequest( + scope="scope_value", + constraint="constraint_value", + ) + + # Make the request + page_result = client.analyze_org_policy_governed_assets(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.asset_v1.types.AnalyzeOrgPolicyGovernedAssetsRequest, dict]): + The request object. A request message for + [AssetService.AnalyzeOrgPolicyGovernedAssets][google.cloud.asset.v1.AssetService.AnalyzeOrgPolicyGovernedAssets]. + scope (str): + Required. The organization to scope the request. Only + organization policies within the scope will be analyzed. + The output assets will also be limited to the ones + governed by those in-scope organization policies. + + - organizations/{ORGANIZATION_NUMBER} (e.g., + "organizations/123456") + + This corresponds to the ``scope`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + constraint (str): + Required. The name of the constraint + to analyze governed assets for. The + analysis only contains analyzed + organization policies for the provided + constraint. + + This corresponds to the ``constraint`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + filter (str): + The expression to filter the governed assets in result. + The only supported fields for governed resources are + ``governed_resource.project`` and + ``governed_resource.folders``. The only supported fields + for governed iam policies are + ``governed_iam_policy.project`` and + ``governed_iam_policy.folders``. The only supported + operator is ``=``. 
+
+                Example 1: governed_resource.project="projects/12345678"
+                filter will return all governed resources under
+                projects/12345678 including the project itself, if
+                applicable.
+
+                Example 2:
+                governed_iam_policy.folders="folders/12345678" filter
+                will return all governed iam policies under
+                folders/12345678, if applicable.
+
+                This corresponds to the ``filter`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.asset_v1.services.asset_service.pagers.AnalyzeOrgPolicyGovernedAssetsPager:
+                The response message for
+                [AssetService.AnalyzeOrgPolicyGovernedAssets][google.cloud.asset.v1.AssetService.AnalyzeOrgPolicyGovernedAssets].
+
+                Iterating over this object will yield results and
+                resolve additional pages automatically.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([scope, constraint, filter])
+        if request is not None and has_flattened_params:
+            raise ValueError('If the `request` argument is set, then none of '
+                             'the individual field arguments should be set.')
+
+        # Minor optimization to avoid making a copy if the user passes
+        # in a asset_service.AnalyzeOrgPolicyGovernedAssetsRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, asset_service.AnalyzeOrgPolicyGovernedAssetsRequest):
+            request = asset_service.AnalyzeOrgPolicyGovernedAssetsRequest(request)
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if scope is not None:
+            request.scope = scope
+        if constraint is not None:
+            request.constraint = constraint
+        if filter is not None:
+            request.filter = filter
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.analyze_org_policy_governed_assets]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("scope", request.scope),
+            )),
+        )
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # This method is paged; wrap the response in a pager, which provides
+        # an `__iter__` convenience method.
+        response = pagers.AnalyzeOrgPolicyGovernedAssetsPager(
+            method=rpc,
+            request=request,
+            response=response,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def __enter__(self) -> "AssetServiceClient":
+        return self
+
+    def __exit__(self, type, value, traceback):
+        """Releases underlying transport's resources.
+
+        .. warning::
+            ONLY use as a context manager if the transport is NOT shared
+            with other clients! Exiting the with block will CLOSE the transport
+            and may cause errors in other clients!
+ """ + self.transport.close() + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + diff --git a/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/pagers.py b/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/pagers.py index 8f7f9706d4..8f71ac0df3 100755 --- a/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/pagers.py +++ b/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/pagers.py @@ -380,3 +380,487 @@ async def async_generator(): def __repr__(self) -> str: return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListSavedQueriesPager: + """A pager for iterating through ``list_saved_queries`` requests. + + This class thinly wraps an initial + :class:`google.cloud.asset_v1.types.ListSavedQueriesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``saved_queries`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListSavedQueries`` requests and continue to iterate + through the ``saved_queries`` field on the + corresponding responses. + + All the usual :class:`google.cloud.asset_v1.types.ListSavedQueriesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., asset_service.ListSavedQueriesResponse], + request: asset_service.ListSavedQueriesRequest, + response: asset_service.ListSavedQueriesResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.asset_v1.types.ListSavedQueriesRequest): + The initial request object. + response (google.cloud.asset_v1.types.ListSavedQueriesResponse): + The initial response object. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = asset_service.ListSavedQueriesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[asset_service.ListSavedQueriesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[asset_service.SavedQuery]: + for page in self.pages: + yield from page.saved_queries + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListSavedQueriesAsyncPager: + """A pager for iterating through ``list_saved_queries`` requests. + + This class thinly wraps an initial + :class:`google.cloud.asset_v1.types.ListSavedQueriesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``saved_queries`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListSavedQueries`` requests and continue to iterate + through the ``saved_queries`` field on the + corresponding responses. + + All the usual :class:`google.cloud.asset_v1.types.ListSavedQueriesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[asset_service.ListSavedQueriesResponse]], + request: asset_service.ListSavedQueriesRequest, + response: asset_service.ListSavedQueriesResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.asset_v1.types.ListSavedQueriesRequest): + The initial request object. + response (google.cloud.asset_v1.types.ListSavedQueriesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = asset_service.ListSavedQueriesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[asset_service.ListSavedQueriesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[asset_service.SavedQuery]: + async def async_generator(): + async for page in self.pages: + for response in page.saved_queries: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class AnalyzeOrgPoliciesPager: + """A pager for iterating through ``analyze_org_policies`` requests. + + This class thinly wraps an initial + :class:`google.cloud.asset_v1.types.AnalyzeOrgPoliciesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``org_policy_results`` field. 
+ + If there are more pages, the ``__iter__`` method will make additional + ``AnalyzeOrgPolicies`` requests and continue to iterate + through the ``org_policy_results`` field on the + corresponding responses. + + All the usual :class:`google.cloud.asset_v1.types.AnalyzeOrgPoliciesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., asset_service.AnalyzeOrgPoliciesResponse], + request: asset_service.AnalyzeOrgPoliciesRequest, + response: asset_service.AnalyzeOrgPoliciesResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.asset_v1.types.AnalyzeOrgPoliciesRequest): + The initial request object. + response (google.cloud.asset_v1.types.AnalyzeOrgPoliciesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = asset_service.AnalyzeOrgPoliciesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[asset_service.AnalyzeOrgPoliciesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult]: + for page in self.pages: + yield from page.org_policy_results + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class AnalyzeOrgPoliciesAsyncPager: + """A pager for iterating through ``analyze_org_policies`` requests. + + This class thinly wraps an initial + :class:`google.cloud.asset_v1.types.AnalyzeOrgPoliciesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``org_policy_results`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``AnalyzeOrgPolicies`` requests and continue to iterate + through the ``org_policy_results`` field on the + corresponding responses. + + All the usual :class:`google.cloud.asset_v1.types.AnalyzeOrgPoliciesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[asset_service.AnalyzeOrgPoliciesResponse]], + request: asset_service.AnalyzeOrgPoliciesRequest, + response: asset_service.AnalyzeOrgPoliciesResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.asset_v1.types.AnalyzeOrgPoliciesRequest): + The initial request object. + response (google.cloud.asset_v1.types.AnalyzeOrgPoliciesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
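+
+        A minimal async iteration sketch (illustrative; assumes an
+        ``AssetServiceAsyncClient`` named ``client`` and a populated
+        ``request``)::
+
+            pager = await client.analyze_org_policies(request=request)
+            async for org_policy_result in pager:
+                print(org_policy_result)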
+ """ + self._method = method + self._request = asset_service.AnalyzeOrgPoliciesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[asset_service.AnalyzeOrgPoliciesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult]: + async def async_generator(): + async for page in self.pages: + for response in page.org_policy_results: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class AnalyzeOrgPolicyGovernedContainersPager: + """A pager for iterating through ``analyze_org_policy_governed_containers`` requests. + + This class thinly wraps an initial + :class:`google.cloud.asset_v1.types.AnalyzeOrgPolicyGovernedContainersResponse` object, and + provides an ``__iter__`` method to iterate through its + ``governed_containers`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``AnalyzeOrgPolicyGovernedContainers`` requests and continue to iterate + through the ``governed_containers`` field on the + corresponding responses. + + All the usual :class:`google.cloud.asset_v1.types.AnalyzeOrgPolicyGovernedContainersResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., asset_service.AnalyzeOrgPolicyGovernedContainersResponse], + request: asset_service.AnalyzeOrgPolicyGovernedContainersRequest, + response: asset_service.AnalyzeOrgPolicyGovernedContainersResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.asset_v1.types.AnalyzeOrgPolicyGovernedContainersRequest): + The initial request object. + response (google.cloud.asset_v1.types.AnalyzeOrgPolicyGovernedContainersResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = asset_service.AnalyzeOrgPolicyGovernedContainersRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[asset_service.AnalyzeOrgPolicyGovernedContainersResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer]: + for page in self.pages: + yield from page.governed_containers + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class AnalyzeOrgPolicyGovernedContainersAsyncPager: + """A pager for iterating through ``analyze_org_policy_governed_containers`` requests. 
+ + This class thinly wraps an initial + :class:`google.cloud.asset_v1.types.AnalyzeOrgPolicyGovernedContainersResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``governed_containers`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``AnalyzeOrgPolicyGovernedContainers`` requests and continue to iterate + through the ``governed_containers`` field on the + corresponding responses. + + All the usual :class:`google.cloud.asset_v1.types.AnalyzeOrgPolicyGovernedContainersResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[asset_service.AnalyzeOrgPolicyGovernedContainersResponse]], + request: asset_service.AnalyzeOrgPolicyGovernedContainersRequest, + response: asset_service.AnalyzeOrgPolicyGovernedContainersResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.asset_v1.types.AnalyzeOrgPolicyGovernedContainersRequest): + The initial request object. + response (google.cloud.asset_v1.types.AnalyzeOrgPolicyGovernedContainersResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = asset_service.AnalyzeOrgPolicyGovernedContainersRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[asset_service.AnalyzeOrgPolicyGovernedContainersResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer]: + async def async_generator(): + async for page in self.pages: + for response in page.governed_containers: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class AnalyzeOrgPolicyGovernedAssetsPager: + """A pager for iterating through ``analyze_org_policy_governed_assets`` requests. + + This class thinly wraps an initial + :class:`google.cloud.asset_v1.types.AnalyzeOrgPolicyGovernedAssetsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``governed_assets`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``AnalyzeOrgPolicyGovernedAssets`` requests and continue to iterate + through the ``governed_assets`` field on the + corresponding responses. + + All the usual :class:`google.cloud.asset_v1.types.AnalyzeOrgPolicyGovernedAssetsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
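+
+    A minimal usage sketch (illustrative; assumes an ``AssetServiceClient``
+    named ``client`` and a populated ``request``)::
+
+        pager = client.analyze_org_policy_governed_assets(request=request)
+        for governed_asset in pager:
+            print(governed_asset)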
+ """ + def __init__(self, + method: Callable[..., asset_service.AnalyzeOrgPolicyGovernedAssetsResponse], + request: asset_service.AnalyzeOrgPolicyGovernedAssetsRequest, + response: asset_service.AnalyzeOrgPolicyGovernedAssetsResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.asset_v1.types.AnalyzeOrgPolicyGovernedAssetsRequest): + The initial request object. + response (google.cloud.asset_v1.types.AnalyzeOrgPolicyGovernedAssetsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = asset_service.AnalyzeOrgPolicyGovernedAssetsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[asset_service.AnalyzeOrgPolicyGovernedAssetsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset]: + for page in self.pages: + yield from page.governed_assets + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class AnalyzeOrgPolicyGovernedAssetsAsyncPager: + """A pager for iterating through ``analyze_org_policy_governed_assets`` requests. + + This class thinly wraps an initial + :class:`google.cloud.asset_v1.types.AnalyzeOrgPolicyGovernedAssetsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``governed_assets`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``AnalyzeOrgPolicyGovernedAssets`` requests and continue to iterate + through the ``governed_assets`` field on the + corresponding responses. + + All the usual :class:`google.cloud.asset_v1.types.AnalyzeOrgPolicyGovernedAssetsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[asset_service.AnalyzeOrgPolicyGovernedAssetsResponse]], + request: asset_service.AnalyzeOrgPolicyGovernedAssetsRequest, + response: asset_service.AnalyzeOrgPolicyGovernedAssetsResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.asset_v1.types.AnalyzeOrgPolicyGovernedAssetsRequest): + The initial request object. + response (google.cloud.asset_v1.types.AnalyzeOrgPolicyGovernedAssetsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = asset_service.AnalyzeOrgPolicyGovernedAssetsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[asset_service.AnalyzeOrgPolicyGovernedAssetsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset]: + async def async_generator(): + async for page in self.pages: + for response in page.governed_assets: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py b/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py index 55b9835f01..d46bee37dc 100755 --- a/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py +++ b/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py @@ -28,7 +28,7 @@ from google.oauth2 import service_account # type: ignore from google.cloud.asset_v1.types import asset_service -from google.longrunning import operations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) @@ -224,6 +224,61 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), + self.analyze_move: gapic_v1.method.wrap_method( + self.analyze_move, + default_timeout=None, + client_info=client_info, + ), + self.query_assets: gapic_v1.method.wrap_method( + self.query_assets, + default_timeout=None, + client_info=client_info, + ), + self.create_saved_query: gapic_v1.method.wrap_method( + self.create_saved_query, + default_timeout=None, + client_info=client_info, + ), + self.get_saved_query: gapic_v1.method.wrap_method( + self.get_saved_query, + default_timeout=None, + client_info=client_info, + ), + self.list_saved_queries: gapic_v1.method.wrap_method( + self.list_saved_queries, + default_timeout=None, + client_info=client_info, + ), + self.update_saved_query: gapic_v1.method.wrap_method( + self.update_saved_query, + default_timeout=None, + client_info=client_info, + ), + self.delete_saved_query: gapic_v1.method.wrap_method( + self.delete_saved_query, + default_timeout=None, + client_info=client_info, + ), + self.batch_get_effective_iam_policies: gapic_v1.method.wrap_method( + self.batch_get_effective_iam_policies, + default_timeout=None, + client_info=client_info, + ), + self.analyze_org_policies: gapic_v1.method.wrap_method( + self.analyze_org_policies, + default_timeout=None, + client_info=client_info, + ), + self.analyze_org_policy_governed_containers: gapic_v1.method.wrap_method( + self.analyze_org_policy_governed_containers, + default_timeout=None, + client_info=client_info, + ), + self.analyze_org_policy_governed_assets: gapic_v1.method.wrap_method( + self.analyze_org_policy_governed_assets, + default_timeout=None, + client_info=client_info, + ), } def close(self): @@ -348,6 +403,114 @@ 
def analyze_iam_policy_longrunning(self) -> Callable[ ]]: raise NotImplementedError() + @property + def analyze_move(self) -> Callable[ + [asset_service.AnalyzeMoveRequest], + Union[ + asset_service.AnalyzeMoveResponse, + Awaitable[asset_service.AnalyzeMoveResponse] + ]]: + raise NotImplementedError() + + @property + def query_assets(self) -> Callable[ + [asset_service.QueryAssetsRequest], + Union[ + asset_service.QueryAssetsResponse, + Awaitable[asset_service.QueryAssetsResponse] + ]]: + raise NotImplementedError() + + @property + def create_saved_query(self) -> Callable[ + [asset_service.CreateSavedQueryRequest], + Union[ + asset_service.SavedQuery, + Awaitable[asset_service.SavedQuery] + ]]: + raise NotImplementedError() + + @property + def get_saved_query(self) -> Callable[ + [asset_service.GetSavedQueryRequest], + Union[ + asset_service.SavedQuery, + Awaitable[asset_service.SavedQuery] + ]]: + raise NotImplementedError() + + @property + def list_saved_queries(self) -> Callable[ + [asset_service.ListSavedQueriesRequest], + Union[ + asset_service.ListSavedQueriesResponse, + Awaitable[asset_service.ListSavedQueriesResponse] + ]]: + raise NotImplementedError() + + @property + def update_saved_query(self) -> Callable[ + [asset_service.UpdateSavedQueryRequest], + Union[ + asset_service.SavedQuery, + Awaitable[asset_service.SavedQuery] + ]]: + raise NotImplementedError() + + @property + def delete_saved_query(self) -> Callable[ + [asset_service.DeleteSavedQueryRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + @property + def batch_get_effective_iam_policies(self) -> Callable[ + [asset_service.BatchGetEffectiveIamPoliciesRequest], + Union[ + asset_service.BatchGetEffectiveIamPoliciesResponse, + Awaitable[asset_service.BatchGetEffectiveIamPoliciesResponse] + ]]: + raise NotImplementedError() + + @property + def analyze_org_policies(self) -> Callable[ + [asset_service.AnalyzeOrgPoliciesRequest], + Union[ + asset_service.AnalyzeOrgPoliciesResponse, + Awaitable[asset_service.AnalyzeOrgPoliciesResponse] + ]]: + raise NotImplementedError() + + @property + def analyze_org_policy_governed_containers(self) -> Callable[ + [asset_service.AnalyzeOrgPolicyGovernedContainersRequest], + Union[ + asset_service.AnalyzeOrgPolicyGovernedContainersResponse, + Awaitable[asset_service.AnalyzeOrgPolicyGovernedContainersResponse] + ]]: + raise NotImplementedError() + + @property + def analyze_org_policy_governed_assets(self) -> Callable[ + [asset_service.AnalyzeOrgPolicyGovernedAssetsRequest], + Union[ + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse, + Awaitable[asset_service.AnalyzeOrgPolicyGovernedAssetsResponse] + ]]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + @property def kind(self) -> str: raise NotImplementedError() diff --git a/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py b/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py index ab3a577ca8..53958bcf81 100755 --- a/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py +++ b/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py @@ -26,7 +26,7 @@ import grpc # type: ignore from google.cloud.asset_v1.types import asset_service 
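The base transport types each new property as a `Callable` returning `Union[Response, Awaitable[Response]]`, so one abstract interface covers the sync gRPC/REST transports and the asyncio transport; concrete subclasses narrow the return type. The pattern in isolation, with invented names:

```python
# The Union[response, Awaitable[response]] pattern from the abstract
# transport, in isolation (names invented for illustration).
from typing import Awaitable, Callable, Union


class BaseTransport:
    @property
    def ping(self) -> Callable[[str], Union[str, Awaitable[str]]]:
        raise NotImplementedError()


class SyncTransport(BaseTransport):
    @property
    def ping(self) -> Callable[[str], str]:
        # The sync transport narrows the union to the plain response type.
        return lambda request: f"pong: {request}"


class AsyncTransport(BaseTransport):
    @property
    def ping(self) -> Callable[[str], Awaitable[str]]:
        # The asyncio transport narrows it to the awaitable side.
        async def _ping(request: str) -> str:
            return f"pong: {request}"
        return _ping
```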
-from google.longrunning import operations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from .base import AssetServiceTransport, DEFAULT_CLIENT_INFO @@ -257,10 +257,10 @@ def export_assets(self) -> Callable[ line represents a [google.cloud.asset.v1.Asset][google.cloud.asset.v1.Asset] in the JSON format; for BigQuery table destinations, the output - table stores the fields in asset proto as columns. This API + table stores the fields in asset Protobuf as columns. This API implements the - [google.longrunning.Operation][google.longrunning.Operation] API - , which allows you to keep track of the export. We recommend + [google.longrunning.Operation][google.longrunning.Operation] + API, which allows you to keep track of the export. We recommend intervals of at least 2 seconds with exponential retry to poll the export operation result. For regular-size resource parent, the export operation usually finishes within 5 minutes. @@ -481,10 +481,10 @@ def search_all_resources(self) -> Callable[ asset_service.SearchAllResourcesResponse]: r"""Return a callable for the search all resources method over gRPC. - Searches all Cloud resources within the specified scope, such as - a project, folder, or organization. The caller must be granted - the ``cloudasset.assets.searchAllResources`` permission on the - desired scope, otherwise the request will be rejected. + Searches all Google Cloud resources within the specified scope, + such as a project, folder, or organization. The caller must be + granted the ``cloudasset.assets.searchAllResources`` permission + on the desired scope, otherwise the request will be rejected. Returns: Callable[[~.SearchAllResourcesRequest], @@ -576,8 +576,8 @@ def analyze_iam_policy_longrunning(self) -> Callable[ [google.longrunning.Operation][google.longrunning.Operation], which allows you to track the operation status. We recommend intervals of at least 2 seconds with exponential backoff retry - to poll the operation result. The metadata contains the request - to help callers to map responses to requests. + to poll the operation result. The metadata contains the metadata + for the long-running operation. Returns: Callable[[~.AnalyzeIamPolicyLongrunningRequest], @@ -597,9 +597,358 @@ def analyze_iam_policy_longrunning(self) -> Callable[ ) return self._stubs['analyze_iam_policy_longrunning'] + @property + def analyze_move(self) -> Callable[ + [asset_service.AnalyzeMoveRequest], + asset_service.AnalyzeMoveResponse]: + r"""Return a callable for the analyze move method over gRPC. + + Analyze moving a resource to a specified destination + without kicking off the actual move. The analysis is + best effort depending on the user's permissions of + viewing different hierarchical policies and + configurations. The policies and configuration are + subject to change before the actual resource migration + takes place. + + Returns: + Callable[[~.AnalyzeMoveRequest], + ~.AnalyzeMoveResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'analyze_move' not in self._stubs: + self._stubs['analyze_move'] = self.grpc_channel.unary_unary( + '/google.cloud.asset.v1.AssetService/AnalyzeMove', + request_serializer=asset_service.AnalyzeMoveRequest.serialize, + response_deserializer=asset_service.AnalyzeMoveResponse.deserialize, + ) + return self._stubs['analyze_move'] + + @property + def query_assets(self) -> Callable[ + [asset_service.QueryAssetsRequest], + asset_service.QueryAssetsResponse]: + r"""Return a callable for the query assets method over gRPC. + + Issue a job that queries assets using a SQL statement compatible + with `BigQuery Standard + SQL `__. + + If the query execution finishes within timeout and there's no + pagination, the full query results will be returned in the + ``QueryAssetsResponse``. + + Otherwise, full query results can be obtained by issuing extra + requests with the ``job_reference`` from a previous + ``QueryAssets`` call. + + Note, the query result has an approximate 10 GB size limit + enforced by BigQuery + (https://cloud.google.com/bigquery/docs/best-practices-performance-output); + queries that return larger results will result in errors. + + Returns: + Callable[[~.QueryAssetsRequest], + ~.QueryAssetsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'query_assets' not in self._stubs: + self._stubs['query_assets'] = self.grpc_channel.unary_unary( + '/google.cloud.asset.v1.AssetService/QueryAssets', + request_serializer=asset_service.QueryAssetsRequest.serialize, + response_deserializer=asset_service.QueryAssetsResponse.deserialize, + ) + return self._stubs['query_assets'] + + @property + def create_saved_query(self) -> Callable[ + [asset_service.CreateSavedQueryRequest], + asset_service.SavedQuery]: + r"""Return a callable for the create saved query method over gRPC. + + Creates a saved query in a parent + project/folder/organization. + + Returns: + Callable[[~.CreateSavedQueryRequest], + ~.SavedQuery]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_saved_query' not in self._stubs: + self._stubs['create_saved_query'] = self.grpc_channel.unary_unary( + '/google.cloud.asset.v1.AssetService/CreateSavedQuery', + request_serializer=asset_service.CreateSavedQueryRequest.serialize, + response_deserializer=asset_service.SavedQuery.deserialize, + ) + return self._stubs['create_saved_query'] + + @property + def get_saved_query(self) -> Callable[ + [asset_service.GetSavedQueryRequest], + asset_service.SavedQuery]: + r"""Return a callable for the get saved query method over gRPC. + + Gets details about a saved query. + + Returns: + Callable[[~.GetSavedQueryRequest], + ~.SavedQuery]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each.
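Every stub property above follows the same lazy, cached construction: build the channel-bound callable on first access, then reuse it. Factored out as a sketch; `grpc_channel` and `_stubs` belong to the surrounding transport:

```python
# The lazy stub-caching idiom shared by every property above, factored out
# (a sketch; `grpc_channel` and `_stubs` come from the surrounding transport).
def _cached_unary_unary(self, name, path, request_serializer, response_deserializer):
    # Build the channel-bound callable on first access, then memoize it so
    # later property reads reuse the same stub.
    if name not in self._stubs:
        self._stubs[name] = self.grpc_channel.unary_unary(
            path,
            request_serializer=request_serializer,
            response_deserializer=response_deserializer,
        )
    return self._stubs[name]
```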
+ if 'get_saved_query' not in self._stubs: + self._stubs['get_saved_query'] = self.grpc_channel.unary_unary( + '/google.cloud.asset.v1.AssetService/GetSavedQuery', + request_serializer=asset_service.GetSavedQueryRequest.serialize, + response_deserializer=asset_service.SavedQuery.deserialize, + ) + return self._stubs['get_saved_query'] + + @property + def list_saved_queries(self) -> Callable[ + [asset_service.ListSavedQueriesRequest], + asset_service.ListSavedQueriesResponse]: + r"""Return a callable for the list saved queries method over gRPC. + + Lists all saved queries in a parent + project/folder/organization. + + Returns: + Callable[[~.ListSavedQueriesRequest], + ~.ListSavedQueriesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_saved_queries' not in self._stubs: + self._stubs['list_saved_queries'] = self.grpc_channel.unary_unary( + '/google.cloud.asset.v1.AssetService/ListSavedQueries', + request_serializer=asset_service.ListSavedQueriesRequest.serialize, + response_deserializer=asset_service.ListSavedQueriesResponse.deserialize, + ) + return self._stubs['list_saved_queries'] + + @property + def update_saved_query(self) -> Callable[ + [asset_service.UpdateSavedQueryRequest], + asset_service.SavedQuery]: + r"""Return a callable for the update saved query method over gRPC. + + Updates a saved query. + + Returns: + Callable[[~.UpdateSavedQueryRequest], + ~.SavedQuery]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_saved_query' not in self._stubs: + self._stubs['update_saved_query'] = self.grpc_channel.unary_unary( + '/google.cloud.asset.v1.AssetService/UpdateSavedQuery', + request_serializer=asset_service.UpdateSavedQueryRequest.serialize, + response_deserializer=asset_service.SavedQuery.deserialize, + ) + return self._stubs['update_saved_query'] + + @property + def delete_saved_query(self) -> Callable[ + [asset_service.DeleteSavedQueryRequest], + empty_pb2.Empty]: + r"""Return a callable for the delete saved query method over gRPC. + + Deletes a saved query. + + Returns: + Callable[[~.DeleteSavedQueryRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_saved_query' not in self._stubs: + self._stubs['delete_saved_query'] = self.grpc_channel.unary_unary( + '/google.cloud.asset.v1.AssetService/DeleteSavedQuery', + request_serializer=asset_service.DeleteSavedQueryRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_saved_query'] + + @property + def batch_get_effective_iam_policies(self) -> Callable[ + [asset_service.BatchGetEffectiveIamPoliciesRequest], + asset_service.BatchGetEffectiveIamPoliciesResponse]: + r"""Return a callable for the batch get effective iam + policies method over gRPC. + + Gets effective IAM policies for a batch of resources. 
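The saved-query stubs wired up above map onto straightforward client calls. A hedged usage sketch, assuming an authenticated client from the published library; the parent, query id, and query content are placeholders, and the message shapes are my reading of the proto rather than something this diff confirms:

```python
# Hedged usage sketch for the saved-query RPCs (assumes an authenticated
# client; parent, id, and query content are placeholders, and the message
# shapes are illustrative).
from google.cloud import asset_v1

client = asset_v1.AssetServiceClient()

saved_query = client.create_saved_query(
    parent="projects/my-project",
    saved_query=asset_v1.SavedQuery(
        content=asset_v1.SavedQuery.QueryContent(
            iam_policy_analysis_query=asset_v1.IamPolicyAnalysisQuery(
                scope="projects/my-project",
            ),
        ),
    ),
    saved_query_id="my-saved-query",
)

for query in client.list_saved_queries(parent="projects/my-project"):
    print(query.name)

client.delete_saved_query(name=saved_query.name)
```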
+ + Returns: + Callable[[~.BatchGetEffectiveIamPoliciesRequest], + ~.BatchGetEffectiveIamPoliciesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'batch_get_effective_iam_policies' not in self._stubs: + self._stubs['batch_get_effective_iam_policies'] = self.grpc_channel.unary_unary( + '/google.cloud.asset.v1.AssetService/BatchGetEffectiveIamPolicies', + request_serializer=asset_service.BatchGetEffectiveIamPoliciesRequest.serialize, + response_deserializer=asset_service.BatchGetEffectiveIamPoliciesResponse.deserialize, + ) + return self._stubs['batch_get_effective_iam_policies'] + + @property + def analyze_org_policies(self) -> Callable[ + [asset_service.AnalyzeOrgPoliciesRequest], + asset_service.AnalyzeOrgPoliciesResponse]: + r"""Return a callable for the analyze org policies method over gRPC. + + Analyzes organization policies under a scope. + + Returns: + Callable[[~.AnalyzeOrgPoliciesRequest], + ~.AnalyzeOrgPoliciesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'analyze_org_policies' not in self._stubs: + self._stubs['analyze_org_policies'] = self.grpc_channel.unary_unary( + '/google.cloud.asset.v1.AssetService/AnalyzeOrgPolicies', + request_serializer=asset_service.AnalyzeOrgPoliciesRequest.serialize, + response_deserializer=asset_service.AnalyzeOrgPoliciesResponse.deserialize, + ) + return self._stubs['analyze_org_policies'] + + @property + def analyze_org_policy_governed_containers(self) -> Callable[ + [asset_service.AnalyzeOrgPolicyGovernedContainersRequest], + asset_service.AnalyzeOrgPolicyGovernedContainersResponse]: + r"""Return a callable for the analyze org policy governed + containers method over gRPC. + + Analyzes organization policies governed containers + (projects, folders or organization) under a scope. + + Returns: + Callable[[~.AnalyzeOrgPolicyGovernedContainersRequest], + ~.AnalyzeOrgPolicyGovernedContainersResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'analyze_org_policy_governed_containers' not in self._stubs: + self._stubs['analyze_org_policy_governed_containers'] = self.grpc_channel.unary_unary( + '/google.cloud.asset.v1.AssetService/AnalyzeOrgPolicyGovernedContainers', + request_serializer=asset_service.AnalyzeOrgPolicyGovernedContainersRequest.serialize, + response_deserializer=asset_service.AnalyzeOrgPolicyGovernedContainersResponse.deserialize, + ) + return self._stubs['analyze_org_policy_governed_containers'] + + @property + def analyze_org_policy_governed_assets(self) -> Callable[ + [asset_service.AnalyzeOrgPolicyGovernedAssetsRequest], + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse]: + r"""Return a callable for the analyze org policy governed + assets method over gRPC. + + Analyzes organization policies governed assets (Google Cloud + resources or policies) under a scope. 
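For the `BatchGetEffectiveIamPolicies` stub defined above, a usage sketch follows; the scope and resource names are placeholders, and the request is built as a plain message since this RPC exposes no flattened parameters:

```python
# Hedged sketch for BatchGetEffectiveIamPolicies (scope and resource names
# are placeholders; a request message is passed directly).
from google.cloud import asset_v1

client = asset_v1.AssetServiceClient()
response = client.batch_get_effective_iam_policies(
    request=asset_v1.BatchGetEffectiveIamPoliciesRequest(
        scope="projects/my-project",
        names=["//cloudresourcemanager.googleapis.com/projects/my-project"],
    )
)
for result in response.policy_results:
    # Each result carries the effective policies found at or above the
    # requested resource.
    print(result.full_resource_name, len(result.policies))
```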
This RPC supports custom + constraints and the following 10 canned constraints: + + - storage.uniformBucketLevelAccess + - iam.disableServiceAccountKeyCreation + - iam.allowedPolicyMemberDomains + - compute.vmExternalIpAccess + - appengine.enforceServiceAccountActAsCheck + - gcp.resourceLocations + - compute.trustedImageProjects + - compute.skipDefaultNetworkCreation + - compute.requireOsLogin + - compute.disableNestedVirtualization + + This RPC only returns either resources of types supported by + `searchable asset + types `__, + or IAM policies. + + Returns: + Callable[[~.AnalyzeOrgPolicyGovernedAssetsRequest], + ~.AnalyzeOrgPolicyGovernedAssetsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'analyze_org_policy_governed_assets' not in self._stubs: + self._stubs['analyze_org_policy_governed_assets'] = self.grpc_channel.unary_unary( + '/google.cloud.asset.v1.AssetService/AnalyzeOrgPolicyGovernedAssets', + request_serializer=asset_service.AnalyzeOrgPolicyGovernedAssetsRequest.serialize, + response_deserializer=asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.deserialize, + ) + return self._stubs['analyze_org_policy_governed_assets'] + def close(self): self.grpc_channel.close() + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + @property def kind(self) -> str: return "grpc" diff --git a/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py b/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py index bd9417a8d2..c8f62d768b 100755 --- a/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py +++ b/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py @@ -26,7 +26,7 @@ from grpc.experimental import aio # type: ignore from google.cloud.asset_v1.types import asset_service -from google.longrunning import operations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from .base import AssetServiceTransport, DEFAULT_CLIENT_INFO from .grpc import AssetServiceGrpcTransport @@ -260,10 +260,10 @@ def export_assets(self) -> Callable[ line represents a [google.cloud.asset.v1.Asset][google.cloud.asset.v1.Asset] in the JSON format; for BigQuery table destinations, the output - table stores the fields in asset proto as columns. This API + table stores the fields in asset Protobuf as columns. This API implements the - [google.longrunning.Operation][google.longrunning.Operation] API - , which allows you to keep track of the export. 
We recommend + [google.longrunning.Operation][google.longrunning.Operation] + API, which allows you to keep track of the export. We recommend intervals of at least 2 seconds with exponential retry to poll the export operation result. For regular-size resource parent, the export operation usually finishes within 5 minutes. @@ -484,10 +484,10 @@ def search_all_resources(self) -> Callable[ Awaitable[asset_service.SearchAllResourcesResponse]]: r"""Return a callable for the search all resources method over gRPC. - Searches all Cloud resources within the specified scope, such as - a project, folder, or organization. The caller must be granted - the ``cloudasset.assets.searchAllResources`` permission on the - desired scope, otherwise the request will be rejected. + Searches all Google Cloud resources within the specified scope, + such as a project, folder, or organization. The caller must be + granted the ``cloudasset.assets.searchAllResources`` permission + on the desired scope, otherwise the request will be rejected. Returns: Callable[[~.SearchAllResourcesRequest], @@ -579,8 +579,8 @@ def analyze_iam_policy_longrunning(self) -> Callable[ [google.longrunning.Operation][google.longrunning.Operation], which allows you to track the operation status. We recommend intervals of at least 2 seconds with exponential backoff retry - to poll the operation result. The metadata contains the request - to help callers to map responses to requests. + to poll the operation result. The metadata contains the metadata + for the long-running operation. Returns: Callable[[~.AnalyzeIamPolicyLongrunningRequest], @@ -600,9 +600,358 @@ def analyze_iam_policy_longrunning(self) -> Callable[ ) return self._stubs['analyze_iam_policy_longrunning'] + @property + def analyze_move(self) -> Callable[ + [asset_service.AnalyzeMoveRequest], + Awaitable[asset_service.AnalyzeMoveResponse]]: + r"""Return a callable for the analyze move method over gRPC. + + Analyze moving a resource to a specified destination + without kicking off the actual move. The analysis is + best effort depending on the user's permissions of + viewing different hierarchical policies and + configurations. The policies and configuration are + subject to change before the actual resource migration + takes place. + + Returns: + Callable[[~.AnalyzeMoveRequest], + Awaitable[~.AnalyzeMoveResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'analyze_move' not in self._stubs: + self._stubs['analyze_move'] = self.grpc_channel.unary_unary( + '/google.cloud.asset.v1.AssetService/AnalyzeMove', + request_serializer=asset_service.AnalyzeMoveRequest.serialize, + response_deserializer=asset_service.AnalyzeMoveResponse.deserialize, + ) + return self._stubs['analyze_move'] + + @property + def query_assets(self) -> Callable[ + [asset_service.QueryAssetsRequest], + Awaitable[asset_service.QueryAssetsResponse]]: + r"""Return a callable for the query assets method over gRPC. + + Issue a job that queries assets using a SQL statement compatible + with `BigQuery Standard + SQL `__. + + If the query execution finishes within timeout and there's no + pagination, the full query results will be returned in the + ``QueryAssetsResponse``. 
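The `QueryAssets` docstring describes a two-phase flow: when results are not ready within the timeout, callers re-issue the call with the returned `job_reference`, as the docstring's continuation below notes. A hedged sketch of that flow with the synchronous client; the parent and statement are placeholders:

```python
# Hedged sketch of the two-phase QueryAssets flow: issue the query, then
# poll with the returned job_reference until done (parent and statement
# are placeholders).
import time

from google.cloud import asset_v1

client = asset_v1.AssetServiceClient()
response = client.query_assets(
    request=asset_v1.QueryAssetsRequest(
        parent="projects/my-project",
        statement="SELECT name, asset_type FROM STANDARD_METADATA",
    )
)
while not response.done:
    time.sleep(2)  # back off between polls
    response = client.query_assets(
        request=asset_v1.QueryAssetsRequest(
            parent="projects/my-project",
            job_reference=response.job_reference,
        )
    )
for row in response.query_result.rows:
    print(row)
```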
+ + Otherwise, full query results can be obtained by issuing extra + requests with the ``job_reference`` from a previous + ``QueryAssets`` call. + + Note, the query result has an approximate 10 GB size limit + enforced by BigQuery + (https://cloud.google.com/bigquery/docs/best-practices-performance-output); + queries that return larger results will result in errors. + + Returns: + Callable[[~.QueryAssetsRequest], + Awaitable[~.QueryAssetsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'query_assets' not in self._stubs: + self._stubs['query_assets'] = self.grpc_channel.unary_unary( + '/google.cloud.asset.v1.AssetService/QueryAssets', + request_serializer=asset_service.QueryAssetsRequest.serialize, + response_deserializer=asset_service.QueryAssetsResponse.deserialize, + ) + return self._stubs['query_assets'] + + @property + def create_saved_query(self) -> Callable[ + [asset_service.CreateSavedQueryRequest], + Awaitable[asset_service.SavedQuery]]: + r"""Return a callable for the create saved query method over gRPC. + + Creates a saved query in a parent + project/folder/organization. + + Returns: + Callable[[~.CreateSavedQueryRequest], + Awaitable[~.SavedQuery]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_saved_query' not in self._stubs: + self._stubs['create_saved_query'] = self.grpc_channel.unary_unary( + '/google.cloud.asset.v1.AssetService/CreateSavedQuery', + request_serializer=asset_service.CreateSavedQueryRequest.serialize, + response_deserializer=asset_service.SavedQuery.deserialize, + ) + return self._stubs['create_saved_query'] + + @property + def get_saved_query(self) -> Callable[ + [asset_service.GetSavedQueryRequest], + Awaitable[asset_service.SavedQuery]]: + r"""Return a callable for the get saved query method over gRPC. + + Gets details about a saved query. + + Returns: + Callable[[~.GetSavedQueryRequest], + Awaitable[~.SavedQuery]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_saved_query' not in self._stubs: + self._stubs['get_saved_query'] = self.grpc_channel.unary_unary( + '/google.cloud.asset.v1.AssetService/GetSavedQuery', + request_serializer=asset_service.GetSavedQueryRequest.serialize, + response_deserializer=asset_service.SavedQuery.deserialize, + ) + return self._stubs['get_saved_query'] + + @property + def list_saved_queries(self) -> Callable[ + [asset_service.ListSavedQueriesRequest], + Awaitable[asset_service.ListSavedQueriesResponse]]: + r"""Return a callable for the list saved queries method over gRPC. + + Lists all saved queries in a parent + project/folder/organization. + + Returns: + Callable[[~.ListSavedQueriesRequest], + Awaitable[~.ListSavedQueriesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request.
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_saved_queries' not in self._stubs: + self._stubs['list_saved_queries'] = self.grpc_channel.unary_unary( + '/google.cloud.asset.v1.AssetService/ListSavedQueries', + request_serializer=asset_service.ListSavedQueriesRequest.serialize, + response_deserializer=asset_service.ListSavedQueriesResponse.deserialize, + ) + return self._stubs['list_saved_queries'] + + @property + def update_saved_query(self) -> Callable[ + [asset_service.UpdateSavedQueryRequest], + Awaitable[asset_service.SavedQuery]]: + r"""Return a callable for the update saved query method over gRPC. + + Updates a saved query. + + Returns: + Callable[[~.UpdateSavedQueryRequest], + Awaitable[~.SavedQuery]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_saved_query' not in self._stubs: + self._stubs['update_saved_query'] = self.grpc_channel.unary_unary( + '/google.cloud.asset.v1.AssetService/UpdateSavedQuery', + request_serializer=asset_service.UpdateSavedQueryRequest.serialize, + response_deserializer=asset_service.SavedQuery.deserialize, + ) + return self._stubs['update_saved_query'] + + @property + def delete_saved_query(self) -> Callable[ + [asset_service.DeleteSavedQueryRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete saved query method over gRPC. + + Deletes a saved query. + + Returns: + Callable[[~.DeleteSavedQueryRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_saved_query' not in self._stubs: + self._stubs['delete_saved_query'] = self.grpc_channel.unary_unary( + '/google.cloud.asset.v1.AssetService/DeleteSavedQuery', + request_serializer=asset_service.DeleteSavedQueryRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_saved_query'] + + @property + def batch_get_effective_iam_policies(self) -> Callable[ + [asset_service.BatchGetEffectiveIamPoliciesRequest], + Awaitable[asset_service.BatchGetEffectiveIamPoliciesResponse]]: + r"""Return a callable for the batch get effective iam + policies method over gRPC. + + Gets effective IAM policies for a batch of resources. + + Returns: + Callable[[~.BatchGetEffectiveIamPoliciesRequest], + Awaitable[~.BatchGetEffectiveIamPoliciesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'batch_get_effective_iam_policies' not in self._stubs: + self._stubs['batch_get_effective_iam_policies'] = self.grpc_channel.unary_unary( + '/google.cloud.asset.v1.AssetService/BatchGetEffectiveIamPolicies', + request_serializer=asset_service.BatchGetEffectiveIamPoliciesRequest.serialize, + response_deserializer=asset_service.BatchGetEffectiveIamPoliciesResponse.deserialize, + ) + return self._stubs['batch_get_effective_iam_policies'] + + @property + def analyze_org_policies(self) -> Callable[ + [asset_service.AnalyzeOrgPoliciesRequest], + Awaitable[asset_service.AnalyzeOrgPoliciesResponse]]: + r"""Return a callable for the analyze org policies method over gRPC. + + Analyzes organization policies under a scope. + + Returns: + Callable[[~.AnalyzeOrgPoliciesRequest], + Awaitable[~.AnalyzeOrgPoliciesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'analyze_org_policies' not in self._stubs: + self._stubs['analyze_org_policies'] = self.grpc_channel.unary_unary( + '/google.cloud.asset.v1.AssetService/AnalyzeOrgPolicies', + request_serializer=asset_service.AnalyzeOrgPoliciesRequest.serialize, + response_deserializer=asset_service.AnalyzeOrgPoliciesResponse.deserialize, + ) + return self._stubs['analyze_org_policies'] + + @property + def analyze_org_policy_governed_containers(self) -> Callable[ + [asset_service.AnalyzeOrgPolicyGovernedContainersRequest], + Awaitable[asset_service.AnalyzeOrgPolicyGovernedContainersResponse]]: + r"""Return a callable for the analyze org policy governed + containers method over gRPC. + + Analyzes organization policies governed containers + (projects, folders or organization) under a scope. + + Returns: + Callable[[~.AnalyzeOrgPolicyGovernedContainersRequest], + Awaitable[~.AnalyzeOrgPolicyGovernedContainersResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'analyze_org_policy_governed_containers' not in self._stubs: + self._stubs['analyze_org_policy_governed_containers'] = self.grpc_channel.unary_unary( + '/google.cloud.asset.v1.AssetService/AnalyzeOrgPolicyGovernedContainers', + request_serializer=asset_service.AnalyzeOrgPolicyGovernedContainersRequest.serialize, + response_deserializer=asset_service.AnalyzeOrgPolicyGovernedContainersResponse.deserialize, + ) + return self._stubs['analyze_org_policy_governed_containers'] + + @property + def analyze_org_policy_governed_assets(self) -> Callable[ + [asset_service.AnalyzeOrgPolicyGovernedAssetsRequest], + Awaitable[asset_service.AnalyzeOrgPolicyGovernedAssetsResponse]]: + r"""Return a callable for the analyze org policy governed + assets method over gRPC. + + Analyzes organization policies governed assets (Google Cloud + resources or policies) under a scope. 
This RPC supports custom + constraints and the following 10 canned constraints: + + - storage.uniformBucketLevelAccess + - iam.disableServiceAccountKeyCreation + - iam.allowedPolicyMemberDomains + - compute.vmExternalIpAccess + - appengine.enforceServiceAccountActAsCheck + - gcp.resourceLocations + - compute.trustedImageProjects + - compute.skipDefaultNetworkCreation + - compute.requireOsLogin + - compute.disableNestedVirtualization + + This RPC only returns either resources of types supported by + `searchable asset + types `__, + or IAM policies. + + Returns: + Callable[[~.AnalyzeOrgPolicyGovernedAssetsRequest], + Awaitable[~.AnalyzeOrgPolicyGovernedAssetsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'analyze_org_policy_governed_assets' not in self._stubs: + self._stubs['analyze_org_policy_governed_assets'] = self.grpc_channel.unary_unary( + '/google.cloud.asset.v1.AssetService/AnalyzeOrgPolicyGovernedAssets', + request_serializer=asset_service.AnalyzeOrgPolicyGovernedAssetsRequest.serialize, + response_deserializer=asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.deserialize, + ) + return self._stubs['analyze_org_policy_governed_assets'] + def close(self): return self.grpc_channel.close() + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + __all__ = ( 'AssetServiceGrpcAsyncIOTransport', diff --git a/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py b/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py index 6d2777d96d..662dc38002 100755 --- a/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py +++ b/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py @@ -41,8 +41,8 @@ from google.cloud.asset_v1.types import asset_service -from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore from .base import AssetServiceTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO @@ -85,6 +85,38 @@ def post_analyze_iam_policy_longrunning(self, response): logging.log(f"Received response: {response}") return response + def pre_analyze_move(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_analyze_move(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_analyze_org_policies(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_analyze_org_policies(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_analyze_org_policy_governed_assets(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_analyze_org_policy_governed_assets(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_analyze_org_policy_governed_containers(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_analyze_org_policy_governed_containers(self, response): + logging.log(f"Received response: {response}") + return response + def pre_batch_get_assets_history(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -93,6 +125,14 @@ def post_batch_get_assets_history(self, response): logging.log(f"Received response: {response}") return response + def pre_batch_get_effective_iam_policies(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_batch_get_effective_iam_policies(self, response): + logging.log(f"Received response: {response}") + return response + def pre_create_feed(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -101,10 +141,22 @@ def post_create_feed(self, response): logging.log(f"Received response: {response}") return response + def pre_create_saved_query(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_saved_query(self, response): + logging.log(f"Received response: {response}") + return response + def pre_delete_feed(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata + def pre_delete_saved_query(self, request, metadata): + 
logging.log(f"Received request: {request}") + return request, metadata + def pre_export_assets(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -121,6 +173,14 @@ def post_get_feed(self, response): logging.log(f"Received response: {response}") return response + def pre_get_saved_query(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_saved_query(self, response): + logging.log(f"Received response: {response}") + return response + def pre_list_assets(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -137,6 +197,22 @@ def post_list_feeds(self, response): logging.log(f"Received response: {response}") return response + def pre_list_saved_queries(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_saved_queries(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_query_assets(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_query_assets(self, response): + logging.log(f"Received response: {response}") + return response + def pre_search_all_iam_policies(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -161,6 +237,14 @@ def post_update_feed(self, response): logging.log(f"Received response: {response}") return response + def pre_update_saved_query(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_saved_query(self, response): + logging.log(f"Received response: {response}") + return response + transport = AssetServiceRestTransport(interceptor=MyCustomAssetServiceInterceptor()) client = AssetServiceClient(transport=transport) @@ -193,6 +277,70 @@ def pre_analyze_iam_policy_longrunning(self, request: asset_service.AnalyzeIamPo def post_analyze_iam_policy_longrunning(self, response: operations_pb2.Operation) -> operations_pb2.Operation: """Post-rpc interceptor for analyze_iam_policy_longrunning + Override in a subclass to manipulate the response + after it is returned by the AssetService server but before + it is returned to user code. + """ + return response + def pre_analyze_move(self, request: asset_service.AnalyzeMoveRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[asset_service.AnalyzeMoveRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for analyze_move + + Override in a subclass to manipulate the request or metadata + before they are sent to the AssetService server. + """ + return request, metadata + + def post_analyze_move(self, response: asset_service.AnalyzeMoveResponse) -> asset_service.AnalyzeMoveResponse: + """Post-rpc interceptor for analyze_move + + Override in a subclass to manipulate the response + after it is returned by the AssetService server but before + it is returned to user code. + """ + return response + def pre_analyze_org_policies(self, request: asset_service.AnalyzeOrgPoliciesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[asset_service.AnalyzeOrgPoliciesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for analyze_org_policies + + Override in a subclass to manipulate the request or metadata + before they are sent to the AssetService server. 
+ """ + return request, metadata + + def post_analyze_org_policies(self, response: asset_service.AnalyzeOrgPoliciesResponse) -> asset_service.AnalyzeOrgPoliciesResponse: + """Post-rpc interceptor for analyze_org_policies + + Override in a subclass to manipulate the response + after it is returned by the AssetService server but before + it is returned to user code. + """ + return response + def pre_analyze_org_policy_governed_assets(self, request: asset_service.AnalyzeOrgPolicyGovernedAssetsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[asset_service.AnalyzeOrgPolicyGovernedAssetsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for analyze_org_policy_governed_assets + + Override in a subclass to manipulate the request or metadata + before they are sent to the AssetService server. + """ + return request, metadata + + def post_analyze_org_policy_governed_assets(self, response: asset_service.AnalyzeOrgPolicyGovernedAssetsResponse) -> asset_service.AnalyzeOrgPolicyGovernedAssetsResponse: + """Post-rpc interceptor for analyze_org_policy_governed_assets + + Override in a subclass to manipulate the response + after it is returned by the AssetService server but before + it is returned to user code. + """ + return response + def pre_analyze_org_policy_governed_containers(self, request: asset_service.AnalyzeOrgPolicyGovernedContainersRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[asset_service.AnalyzeOrgPolicyGovernedContainersRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for analyze_org_policy_governed_containers + + Override in a subclass to manipulate the request or metadata + before they are sent to the AssetService server. + """ + return request, metadata + + def post_analyze_org_policy_governed_containers(self, response: asset_service.AnalyzeOrgPolicyGovernedContainersResponse) -> asset_service.AnalyzeOrgPolicyGovernedContainersResponse: + """Post-rpc interceptor for analyze_org_policy_governed_containers + Override in a subclass to manipulate the response after it is returned by the AssetService server but before it is returned to user code. @@ -209,6 +357,22 @@ def pre_batch_get_assets_history(self, request: asset_service.BatchGetAssetsHist def post_batch_get_assets_history(self, response: asset_service.BatchGetAssetsHistoryResponse) -> asset_service.BatchGetAssetsHistoryResponse: """Post-rpc interceptor for batch_get_assets_history + Override in a subclass to manipulate the response + after it is returned by the AssetService server but before + it is returned to user code. + """ + return response + def pre_batch_get_effective_iam_policies(self, request: asset_service.BatchGetEffectiveIamPoliciesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[asset_service.BatchGetEffectiveIamPoliciesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for batch_get_effective_iam_policies + + Override in a subclass to manipulate the request or metadata + before they are sent to the AssetService server. + """ + return request, metadata + + def post_batch_get_effective_iam_policies(self, response: asset_service.BatchGetEffectiveIamPoliciesResponse) -> asset_service.BatchGetEffectiveIamPoliciesResponse: + """Post-rpc interceptor for batch_get_effective_iam_policies + Override in a subclass to manipulate the response after it is returned by the AssetService server but before it is returned to user code. 
@@ -225,6 +389,22 @@ def pre_create_feed(self, request: asset_service.CreateFeedRequest, metadata: Se def post_create_feed(self, response: asset_service.Feed) -> asset_service.Feed: """Post-rpc interceptor for create_feed + Override in a subclass to manipulate the response + after it is returned by the AssetService server but before + it is returned to user code. + """ + return response + def pre_create_saved_query(self, request: asset_service.CreateSavedQueryRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[asset_service.CreateSavedQueryRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_saved_query + + Override in a subclass to manipulate the request or metadata + before they are sent to the AssetService server. + """ + return request, metadata + + def post_create_saved_query(self, response: asset_service.SavedQuery) -> asset_service.SavedQuery: + """Post-rpc interceptor for create_saved_query + Override in a subclass to manipulate the response after it is returned by the AssetService server but before it is returned to user code. @@ -238,6 +418,14 @@ def pre_delete_feed(self, request: asset_service.DeleteFeedRequest, metadata: Se """ return request, metadata + def pre_delete_saved_query(self, request: asset_service.DeleteSavedQueryRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[asset_service.DeleteSavedQueryRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_saved_query + + Override in a subclass to manipulate the request or metadata + before they are sent to the AssetService server. + """ + return request, metadata + def pre_export_assets(self, request: asset_service.ExportAssetsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[asset_service.ExportAssetsRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for export_assets @@ -265,6 +453,22 @@ def pre_get_feed(self, request: asset_service.GetFeedRequest, metadata: Sequence def post_get_feed(self, response: asset_service.Feed) -> asset_service.Feed: """Post-rpc interceptor for get_feed + Override in a subclass to manipulate the response + after it is returned by the AssetService server but before + it is returned to user code. + """ + return response + def pre_get_saved_query(self, request: asset_service.GetSavedQueryRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[asset_service.GetSavedQueryRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_saved_query + + Override in a subclass to manipulate the request or metadata + before they are sent to the AssetService server. + """ + return request, metadata + + def post_get_saved_query(self, response: asset_service.SavedQuery) -> asset_service.SavedQuery: + """Post-rpc interceptor for get_saved_query + Override in a subclass to manipulate the response after it is returned by the AssetService server but before it is returned to user code. @@ -297,6 +501,38 @@ def pre_list_feeds(self, request: asset_service.ListFeedsRequest, metadata: Sequ def post_list_feeds(self, response: asset_service.ListFeedsResponse) -> asset_service.ListFeedsResponse: """Post-rpc interceptor for list_feeds + Override in a subclass to manipulate the response + after it is returned by the AssetService server but before + it is returned to user code. 
+ """ + return response + def pre_list_saved_queries(self, request: asset_service.ListSavedQueriesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[asset_service.ListSavedQueriesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_saved_queries + + Override in a subclass to manipulate the request or metadata + before they are sent to the AssetService server. + """ + return request, metadata + + def post_list_saved_queries(self, response: asset_service.ListSavedQueriesResponse) -> asset_service.ListSavedQueriesResponse: + """Post-rpc interceptor for list_saved_queries + + Override in a subclass to manipulate the response + after it is returned by the AssetService server but before + it is returned to user code. + """ + return response + def pre_query_assets(self, request: asset_service.QueryAssetsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[asset_service.QueryAssetsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for query_assets + + Override in a subclass to manipulate the request or metadata + before they are sent to the AssetService server. + """ + return request, metadata + + def post_query_assets(self, response: asset_service.QueryAssetsResponse) -> asset_service.QueryAssetsResponse: + """Post-rpc interceptor for query_assets + Override in a subclass to manipulate the response after it is returned by the AssetService server but before it is returned to user code. @@ -350,6 +586,43 @@ def post_update_feed(self, response: asset_service.Feed) -> asset_service.Feed: it is returned to user code. """ return response + def pre_update_saved_query(self, request: asset_service.UpdateSavedQueryRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[asset_service.UpdateSavedQueryRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_saved_query + + Override in a subclass to manipulate the request or metadata + before they are sent to the AssetService server. + """ + return request, metadata + + def post_update_saved_query(self, response: asset_service.SavedQuery) -> asset_service.SavedQuery: + """Post-rpc interceptor for update_saved_query + + Override in a subclass to manipulate the response + after it is returned by the AssetService server but before + it is returned to user code. + """ + return response + + def pre_get_operation( + self, request: operations_pb2.GetOperationRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the AssetService server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the AssetService server but before + it is returned to user code. + """ + return response @dataclasses.dataclass @@ -462,6 +735,12 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: # Only create a new client if we do not already have one. 
if self._operations_client is None: http_options: Dict[str, List[Dict[str, str]]] = { + 'google.longrunning.Operations.GetOperation': [ + { + 'method': 'get', + 'uri': '/v1/{name=*/*/operations/*/**}', + }, + ], } rest_transport = operations_v1.OperationsRestTransport( @@ -644,28 +923,29 @@ def __call__(self, resp = self._interceptor.post_analyze_iam_policy_longrunning(resp) return resp - class _BatchGetAssetsHistory(AssetServiceRestStub): + class _AnalyzeMove(AssetServiceRestStub): def __hash__(self): - return hash("BatchGetAssetsHistory") + return hash("AnalyzeMove") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + "destinationParent" : "", } @classmethod def _get_unset_required_fields(cls, message_dict): return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} def __call__(self, - request: asset_service.BatchGetAssetsHistoryRequest, *, + request: asset_service.AnalyzeMoveRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, metadata: Sequence[Tuple[str, str]]=(), - ) -> asset_service.BatchGetAssetsHistoryResponse: - r"""Call the batch get assets history method over HTTP. + ) -> asset_service.AnalyzeMoveResponse: + r"""Call the analyze move method over HTTP. Args: - request (~.asset_service.BatchGetAssetsHistoryRequest): - The request object. Batch get assets history request. + request (~.asset_service.AnalyzeMoveRequest): + The request object. The request message for performing + resource move analysis. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -673,17 +953,19 @@ def __call__(self, sent along with the request as metadata. Returns: - ~.asset_service.BatchGetAssetsHistoryResponse: - Batch get assets history response. + ~.asset_service.AnalyzeMoveResponse: + The response message for resource + move analysis. 
+ """ http_options: List[Dict[str, str]] = [{ 'method': 'get', - 'uri': '/v1/{parent=*/*}:batchGetAssetsHistory', + 'uri': '/v1/{resource=*/*}:analyzeMove', }, ] - request, metadata = self._interceptor.pre_batch_get_assets_history(request, metadata) - pb_request = asset_service.BatchGetAssetsHistoryRequest.pb(request) + request, metadata = self._interceptor.pre_analyze_move(request, metadata) + pb_request = asset_service.AnalyzeMoveRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) uri = transcoded_request['uri'] @@ -713,35 +995,36 @@ def __call__(self, raise core_exceptions.from_http_response(response) # Return the response - resp = asset_service.BatchGetAssetsHistoryResponse() - pb_resp = asset_service.BatchGetAssetsHistoryResponse.pb(resp) + resp = asset_service.AnalyzeMoveResponse() + pb_resp = asset_service.AnalyzeMoveResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_batch_get_assets_history(resp) + resp = self._interceptor.post_analyze_move(resp) return resp - class _CreateFeed(AssetServiceRestStub): + class _AnalyzeOrgPolicies(AssetServiceRestStub): def __hash__(self): - return hash("CreateFeed") + return hash("AnalyzeOrgPolicies") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + "constraint" : "", } @classmethod def _get_unset_required_fields(cls, message_dict): return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} def __call__(self, - request: asset_service.CreateFeedRequest, *, + request: asset_service.AnalyzeOrgPoliciesRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, metadata: Sequence[Tuple[str, str]]=(), - ) -> asset_service.Feed: - r"""Call the create feed method over HTTP. + ) -> asset_service.AnalyzeOrgPoliciesResponse: + r"""Call the analyze org policies method over HTTP. Args: - request (~.asset_service.CreateFeedRequest): - The request object. Create asset feed request. + request (~.asset_service.AnalyzeOrgPoliciesRequest): + The request object. A request message for + [AssetService.AnalyzeOrgPolicies][google.cloud.asset.v1.AssetService.AnalyzeOrgPolicies]. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -749,35 +1032,21 @@ def __call__(self, sent along with the request as metadata. Returns: - ~.asset_service.Feed: - An asset feed used to export asset - updates to a destinations. An asset feed - filter controls what updates are - exported. The asset feed must be created - within a project, organization, or - folder. Supported destinations are: - - Pub/Sub topics. + ~.asset_service.AnalyzeOrgPoliciesResponse: + The response message for + [AssetService.AnalyzeOrgPolicies][google.cloud.asset.v1.AssetService.AnalyzeOrgPolicies]. 
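Every stub body repeats the same pipeline: the per-RPC http_options are matched against the request by path_template.transcode (from google.api_core), which splits the message into an HTTP verb, a URI, query parameters, and, for POST/PATCH mappings, a body. A small sketch of the transcoding step for AnalyzeMove, using hypothetical resource names:

    from google.api_core import path_template
    from google.cloud.asset_v1.types import asset_service

    http_options = [{'method': 'get', 'uri': '/v1/{resource=*/*}:analyzeMove'}]
    request = asset_service.AnalyzeMoveRequest(
        resource='projects/my-project',      # hypothetical; must match the */* template
        destination_parent='folders/12345',  # hypothetical destination
    )
    transcoded = path_template.transcode(
        http_options, asset_service.AnalyzeMoveRequest.pb(request))

    assert transcoded['method'] == 'get'
    assert transcoded['uri'] == '/v1/projects/my-project:analyzeMove'
    # Fields the URI template did not consume (destination_parent here) remain
    # in transcoded['query_params'] and end up in the query string.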
""" http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{parent=*/*}/feeds', - 'body': '*', + 'method': 'get', + 'uri': '/v1/{scope=*/*}:analyzeOrgPolicies', }, ] - request, metadata = self._interceptor.pre_create_feed(request, metadata) - pb_request = asset_service.CreateFeedRequest.pb(request) + request, metadata = self._interceptor.pre_analyze_org_policies(request, metadata) + pb_request = asset_service.AnalyzeOrgPoliciesRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=False - ) uri = transcoded_request['uri'] method = transcoded_request['method'] @@ -797,7 +1066,6 @@ def __call__(self, timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -806,16 +1074,576 @@ def __call__(self, raise core_exceptions.from_http_response(response) # Return the response - resp = asset_service.Feed() - pb_resp = asset_service.Feed.pb(resp) + resp = asset_service.AnalyzeOrgPoliciesResponse() + pb_resp = asset_service.AnalyzeOrgPoliciesResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_create_feed(resp) + resp = self._interceptor.post_analyze_org_policies(resp) + return resp + + class _AnalyzeOrgPolicyGovernedAssets(AssetServiceRestStub): + def __hash__(self): + return hash("AnalyzeOrgPolicyGovernedAssets") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "constraint" : "", } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: asset_service.AnalyzeOrgPolicyGovernedAssetsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> asset_service.AnalyzeOrgPolicyGovernedAssetsResponse: + r"""Call the analyze org policy + governed assets method over HTTP. + + Args: + request (~.asset_service.AnalyzeOrgPolicyGovernedAssetsRequest): + The request object. A request message for + [AssetService.AnalyzeOrgPolicyGovernedAssets][google.cloud.asset.v1.AssetService.AnalyzeOrgPolicyGovernedAssets]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.asset_service.AnalyzeOrgPolicyGovernedAssetsResponse: + The response message for + [AssetService.AnalyzeOrgPolicyGovernedAssets][google.cloud.asset.v1.AssetService.AnalyzeOrgPolicyGovernedAssets]. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{scope=*/*}:analyzeOrgPolicyGovernedAssets', + }, + ] + request, metadata = self._interceptor.pre_analyze_org_policy_governed_assets(request, metadata) + pb_request = asset_service.AnalyzeOrgPolicyGovernedAssetsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = asset_service.AnalyzeOrgPolicyGovernedAssetsResponse() + pb_resp = asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_analyze_org_policy_governed_assets(resp) + return resp + + class _AnalyzeOrgPolicyGovernedContainers(AssetServiceRestStub): + def __hash__(self): + return hash("AnalyzeOrgPolicyGovernedContainers") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "constraint" : "", } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: asset_service.AnalyzeOrgPolicyGovernedContainersRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> asset_service.AnalyzeOrgPolicyGovernedContainersResponse: + r"""Call the analyze org policy + governed containers method over HTTP. + + Args: + request (~.asset_service.AnalyzeOrgPolicyGovernedContainersRequest): + The request object. A request message for + [AssetService.AnalyzeOrgPolicyGovernedContainers][google.cloud.asset.v1.AssetService.AnalyzeOrgPolicyGovernedContainers]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.asset_service.AnalyzeOrgPolicyGovernedContainersResponse: + The response message for + [AssetService.AnalyzeOrgPolicyGovernedContainers][google.cloud.asset.v1.AssetService.AnalyzeOrgPolicyGovernedContainers]. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{scope=*/*}:analyzeOrgPolicyGovernedContainers', + }, + ] + request, metadata = self._interceptor.pre_analyze_org_policy_governed_containers(request, metadata) + pb_request = asset_service.AnalyzeOrgPolicyGovernedContainersRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = asset_service.AnalyzeOrgPolicyGovernedContainersResponse() + pb_resp = asset_service.AnalyzeOrgPolicyGovernedContainersResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_analyze_org_policy_governed_containers(resp) + return resp + + class _BatchGetAssetsHistory(AssetServiceRestStub): + def __hash__(self): + return hash("BatchGetAssetsHistory") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: asset_service.BatchGetAssetsHistoryRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> asset_service.BatchGetAssetsHistoryResponse: + r"""Call the batch get assets history method over HTTP. + + Args: + request (~.asset_service.BatchGetAssetsHistoryRequest): + The request object. Batch get assets history request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.asset_service.BatchGetAssetsHistoryResponse: + Batch get assets history response. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{parent=*/*}:batchGetAssetsHistory', + }, + ] + request, metadata = self._interceptor.pre_batch_get_assets_history(request, metadata) + pb_request = asset_service.BatchGetAssetsHistoryRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = asset_service.BatchGetAssetsHistoryResponse() + pb_resp = asset_service.BatchGetAssetsHistoryResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_batch_get_assets_history(resp) + return resp + + class _BatchGetEffectiveIamPolicies(AssetServiceRestStub): + def __hash__(self): + return hash("BatchGetEffectiveIamPolicies") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "names" : "", } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: asset_service.BatchGetEffectiveIamPoliciesRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> asset_service.BatchGetEffectiveIamPoliciesResponse: + r"""Call the batch get effective iam + policies method over HTTP. + + Args: + request (~.asset_service.BatchGetEffectiveIamPoliciesRequest): + The request object. A request message for + [AssetService.BatchGetEffectiveIamPolicies][google.cloud.asset.v1.AssetService.BatchGetEffectiveIamPolicies]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.asset_service.BatchGetEffectiveIamPoliciesResponse: + A response message for + [AssetService.BatchGetEffectiveIamPolicies][google.cloud.asset.v1.AssetService.BatchGetEffectiveIamPolicies]. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{scope=*/*}/effectiveIamPolicies:batchGet', + }, + ] + request, metadata = self._interceptor.pre_batch_get_effective_iam_policies(request, metadata) + pb_request = asset_service.BatchGetEffectiveIamPoliciesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = asset_service.BatchGetEffectiveIamPoliciesResponse() + pb_resp = asset_service.BatchGetEffectiveIamPoliciesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_batch_get_effective_iam_policies(resp) + return resp + + class _CreateFeed(AssetServiceRestStub): + def __hash__(self): + return hash("CreateFeed") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: asset_service.CreateFeedRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> asset_service.Feed: + r"""Call the create feed method over HTTP. + + Args: + request (~.asset_service.CreateFeedRequest): + The request object. Create asset feed request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.asset_service.Feed: + An asset feed used to export asset + updates to a destinations. An asset feed + filter controls what updates are + exported. The asset feed must be created + within a project, organization, or + folder. Supported destinations are: + + Pub/Sub topics. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{parent=*/*}/feeds', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_create_feed(request, metadata) + pb_request = asset_service.CreateFeedRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = asset_service.Feed() + pb_resp = asset_service.Feed.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_feed(resp) + return resp + + class _CreateSavedQuery(AssetServiceRestStub): + def __hash__(self): + return hash("CreateSavedQuery") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "savedQueryId" : "", } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: asset_service.CreateSavedQueryRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> asset_service.SavedQuery: + r"""Call the create saved query method over HTTP. + + Args: + request (~.asset_service.CreateSavedQueryRequest): + The request object. Request to create a saved query. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.asset_service.SavedQuery: + A saved query which can be shared + with others or used later. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{parent=*/*}/savedQueries', + 'body': 'saved_query', + }, + ] + request, metadata = self._interceptor.pre_create_saved_query(request, metadata) + pb_request = asset_service.CreateSavedQueryRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = asset_service.SavedQuery() + pb_resp = asset_service.SavedQuery.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_saved_query(resp) return resp class _DeleteFeed(AssetServiceRestStub): def __hash__(self): - return hash("DeleteFeed") + return hash("DeleteFeed") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: asset_service.DeleteFeedRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ): + r"""Call the delete feed method over HTTP. + + Args: + request (~.asset_service.DeleteFeedRequest): + The request object. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
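Note the two body mappings in play: _CreateFeed posts the entire request message ('body': '*'), while _CreateSavedQuery posts only the saved_query sub-message, leaving parent for the URI and saved_query_id for the query string. A sketch of what the body serialization yields, with hypothetical field values:

    from google.protobuf import json_format
    from google.cloud.asset_v1.types import asset_service

    request = asset_service.CreateSavedQueryRequest(
        parent='projects/my-project',    # hypothetical; travels in the URI
        saved_query_id='my-query',       # hypothetical; travels as a query param
        saved_query=asset_service.SavedQuery(description='example query'),
    )
    pb_request = asset_service.CreateSavedQueryRequest.pb(request)

    # With 'body': 'saved_query', only the sub-message becomes the HTTP payload:
    body = json_format.MessageToJson(
        pb_request.saved_query,
        including_default_value_fields=False,
        use_integers_for_enums=False,
    )
    # body == '{"description": "example query"}' (modulo whitespace)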
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/v1/{name=*/*/feeds/*}', + }, + ] + request, metadata = self._interceptor.pre_delete_feed(request, metadata) + pb_request = asset_service.DeleteFeedRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _DeleteSavedQuery(AssetServiceRestStub): + def __hash__(self): + return hash("DeleteSavedQuery") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { } @@ -825,16 +1653,16 @@ def _get_unset_required_fields(cls, message_dict): return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} def __call__(self, - request: asset_service.DeleteFeedRequest, *, + request: asset_service.DeleteSavedQueryRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, metadata: Sequence[Tuple[str, str]]=(), ): - r"""Call the delete feed method over HTTP. + r"""Call the delete saved query method over HTTP. Args: - request (~.asset_service.DeleteFeedRequest): - The request object. + request (~.asset_service.DeleteSavedQueryRequest): + The request object. Request to delete a saved query. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
@@ -844,11 +1672,11 @@ def __call__(self, http_options: List[Dict[str, str]] = [{ 'method': 'delete', - 'uri': '/v1/{name=*/*/feeds/*}', + 'uri': '/v1/{name=*/*/savedQueries/*}', }, ] - request, metadata = self._interceptor.pre_delete_feed(request, metadata) - pb_request = asset_service.DeleteFeedRequest.pb(request) + request, metadata = self._interceptor.pre_delete_saved_query(request, metadata) + pb_request = asset_service.DeleteSavedQueryRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) uri = transcoded_request['uri'] @@ -919,17 +1747,254 @@ def __call__(self, 'body': '*', }, ] - request, metadata = self._interceptor.pre_export_assets(request, metadata) - pb_request = asset_service.ExportAssetsRequest.pb(request) + request, metadata = self._interceptor.pre_export_assets(request, metadata) + pb_request = asset_service.ExportAssetsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_export_assets(resp) + return resp + + class _GetFeed(AssetServiceRestStub): + def __hash__(self): + return hash("GetFeed") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: asset_service.GetFeedRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> asset_service.Feed: + r"""Call the get feed method over HTTP. + + Args: + request (~.asset_service.GetFeedRequest): + The request object. Get asset feed request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.asset_service.Feed: + An asset feed used to export asset + updates to a destinations. An asset feed + filter controls what updates are + exported. The asset feed must be created + within a project, organization, or + folder. Supported destinations are: + + Pub/Sub topics. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=*/*/feeds/*}', + }, + ] + request, metadata = self._interceptor.pre_get_feed(request, metadata) + pb_request = asset_service.GetFeedRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = asset_service.Feed() + pb_resp = asset_service.Feed.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_feed(resp) + return resp + + class _GetSavedQuery(AssetServiceRestStub): + def __hash__(self): + return hash("GetSavedQuery") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: asset_service.GetSavedQueryRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> asset_service.SavedQuery: + r"""Call the get saved query method over HTTP. + + Args: + request (~.asset_service.GetSavedQueryRequest): + The request object. Request to get a saved query. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.asset_service.SavedQuery: + A saved query which can be shared + with others or used later. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=*/*/savedQueries/*}', + }, + ] + request, metadata = self._interceptor.pre_get_saved_query(request, metadata) + pb_request = asset_service.GetSavedQueryRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = asset_service.SavedQuery() + pb_resp = asset_service.SavedQuery.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_saved_query(resp) + return resp + + class _ListAssets(AssetServiceRestStub): + def __hash__(self): + return hash("ListAssets") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: asset_service.ListAssetsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> asset_service.ListAssetsResponse: + r"""Call the list assets method over HTTP. + + Args: + request (~.asset_service.ListAssetsRequest): + The request object. ListAssets request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.asset_service.ListAssetsResponse: + ListAssets response. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{parent=*/*}/assets', + }, + ] + request, metadata = self._interceptor.pre_list_assets(request, metadata) + pb_request = asset_service.ListAssetsRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=False - ) uri = transcoded_request['uri'] method = transcoded_request['method'] @@ -949,7 +2014,6 @@ def __call__(self, timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -958,14 +2022,16 @@ def __call__(self, raise core_exceptions.from_http_response(response) # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_export_assets(resp) + resp = asset_service.ListAssetsResponse() + pb_resp = asset_service.ListAssetsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_assets(resp) return resp - class _GetFeed(AssetServiceRestStub): + class _ListFeeds(AssetServiceRestStub): def __hash__(self): - return hash("GetFeed") + return hash("ListFeeds") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { } @@ -975,16 +2041,16 @@ def _get_unset_required_fields(cls, message_dict): return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} def __call__(self, - request: asset_service.GetFeedRequest, *, + request: asset_service.ListFeedsRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, metadata: Sequence[Tuple[str, str]]=(), - ) -> asset_service.Feed: - r"""Call the get feed method over HTTP. + ) -> asset_service.ListFeedsResponse: + r"""Call the list feeds method over HTTP. Args: - request (~.asset_service.GetFeedRequest): - The request object. Get asset feed request. + request (~.asset_service.ListFeedsRequest): + The request object. List asset feeds request. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -992,25 +2058,17 @@ def __call__(self, sent along with the request as metadata. Returns: - ~.asset_service.Feed: - An asset feed used to export asset - updates to a destinations. An asset feed - filter controls what updates are - exported. The asset feed must be created - within a project, organization, or - folder. Supported destinations are: - - Pub/Sub topics. 
+ ~.asset_service.ListFeedsResponse: """ http_options: List[Dict[str, str]] = [{ 'method': 'get', - 'uri': '/v1/{name=*/*/feeds/*}', + 'uri': '/v1/{parent=*/*}/feeds', }, ] - request, metadata = self._interceptor.pre_get_feed(request, metadata) - pb_request = asset_service.GetFeedRequest.pb(request) + request, metadata = self._interceptor.pre_list_feeds(request, metadata) + pb_request = asset_service.ListFeedsRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) uri = transcoded_request['uri'] @@ -1040,16 +2098,16 @@ def __call__(self, raise core_exceptions.from_http_response(response) # Return the response - resp = asset_service.Feed() - pb_resp = asset_service.Feed.pb(resp) + resp = asset_service.ListFeedsResponse() + pb_resp = asset_service.ListFeedsResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_feed(resp) + resp = self._interceptor.post_list_feeds(resp) return resp - class _ListAssets(AssetServiceRestStub): + class _ListSavedQueries(AssetServiceRestStub): def __hash__(self): - return hash("ListAssets") + return hash("ListSavedQueries") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { } @@ -1059,16 +2117,16 @@ def _get_unset_required_fields(cls, message_dict): return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} def __call__(self, - request: asset_service.ListAssetsRequest, *, + request: asset_service.ListSavedQueriesRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, metadata: Sequence[Tuple[str, str]]=(), - ) -> asset_service.ListAssetsResponse: - r"""Call the list assets method over HTTP. + ) -> asset_service.ListSavedQueriesResponse: + r"""Call the list saved queries method over HTTP. Args: - request (~.asset_service.ListAssetsRequest): - The request object. ListAssets request. + request (~.asset_service.ListSavedQueriesRequest): + The request object. Request to list saved queries. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1076,17 +2134,17 @@ def __call__(self, sent along with the request as metadata. Returns: - ~.asset_service.ListAssetsResponse: - ListAssets response. + ~.asset_service.ListSavedQueriesResponse: + Response of listing saved queries. 
""" http_options: List[Dict[str, str]] = [{ 'method': 'get', - 'uri': '/v1/{parent=*/*}/assets', + 'uri': '/v1/{parent=*/*}/savedQueries', }, ] - request, metadata = self._interceptor.pre_list_assets(request, metadata) - pb_request = asset_service.ListAssetsRequest.pb(request) + request, metadata = self._interceptor.pre_list_saved_queries(request, metadata) + pb_request = asset_service.ListSavedQueriesRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) uri = transcoded_request['uri'] @@ -1116,16 +2174,16 @@ def __call__(self, raise core_exceptions.from_http_response(response) # Return the response - resp = asset_service.ListAssetsResponse() - pb_resp = asset_service.ListAssetsResponse.pb(resp) + resp = asset_service.ListSavedQueriesResponse() + pb_resp = asset_service.ListSavedQueriesResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_assets(resp) + resp = self._interceptor.post_list_saved_queries(resp) return resp - class _ListFeeds(AssetServiceRestStub): + class _QueryAssets(AssetServiceRestStub): def __hash__(self): - return hash("ListFeeds") + return hash("QueryAssets") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { } @@ -1135,16 +2193,16 @@ def _get_unset_required_fields(cls, message_dict): return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} def __call__(self, - request: asset_service.ListFeedsRequest, *, + request: asset_service.QueryAssetsRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, metadata: Sequence[Tuple[str, str]]=(), - ) -> asset_service.ListFeedsResponse: - r"""Call the list feeds method over HTTP. + ) -> asset_service.QueryAssetsResponse: + r"""Call the query assets method over HTTP. Args: - request (~.asset_service.ListFeedsRequest): - The request object. List asset feeds request. + request (~.asset_service.QueryAssetsRequest): + The request object. QueryAssets request. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1152,19 +2210,27 @@ def __call__(self, sent along with the request as metadata. Returns: - ~.asset_service.ListFeedsResponse: - + ~.asset_service.QueryAssetsResponse: + QueryAssets response. 
""" http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=*/*}/feeds', + 'method': 'post', + 'uri': '/v1/{parent=*/*}:queryAssets', + 'body': '*', }, ] - request, metadata = self._interceptor.pre_list_feeds(request, metadata) - pb_request = asset_service.ListFeedsRequest.pb(request) + request, metadata = self._interceptor.pre_query_assets(request, metadata) + pb_request = asset_service.QueryAssetsRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) uri = transcoded_request['uri'] method = transcoded_request['method'] @@ -1184,6 +2250,7 @@ def __call__(self, timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -1192,11 +2259,11 @@ def __call__(self, raise core_exceptions.from_http_response(response) # Return the response - resp = asset_service.ListFeedsResponse() - pb_resp = asset_service.ListFeedsResponse.pb(resp) + resp = asset_service.QueryAssetsResponse() + pb_resp = asset_service.QueryAssetsResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_feeds(resp) + resp = self._interceptor.post_query_assets(resp) return resp class _SearchAllIamPolicies(AssetServiceRestStub): @@ -1444,6 +2511,93 @@ def __call__(self, resp = self._interceptor.post_update_feed(resp) return resp + class _UpdateSavedQuery(AssetServiceRestStub): + def __hash__(self): + return hash("UpdateSavedQuery") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask" : {}, } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: asset_service.UpdateSavedQueryRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> asset_service.SavedQuery: + r"""Call the update saved query method over HTTP. + + Args: + request (~.asset_service.UpdateSavedQueryRequest): + The request object. Request to update a saved query. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.asset_service.SavedQuery: + A saved query which can be shared + with others or used later. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/v1/{saved_query.name=*/*/savedQueries/*}', + 'body': 'saved_query', + }, + ] + request, metadata = self._interceptor.pre_update_saved_query(request, metadata) + pb_request = asset_service.UpdateSavedQueryRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = asset_service.SavedQuery() + pb_resp = asset_service.SavedQuery.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_saved_query(resp) + return resp + @property def analyze_iam_policy(self) -> Callable[ [asset_service.AnalyzeIamPolicyRequest], @@ -1460,6 +2614,38 @@ def analyze_iam_policy_longrunning(self) -> Callable[ # In C++ this would require a dynamic_cast return self._AnalyzeIamPolicyLongrunning(self._session, self._host, self._interceptor) # type: ignore + @property + def analyze_move(self) -> Callable[ + [asset_service.AnalyzeMoveRequest], + asset_service.AnalyzeMoveResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._AnalyzeMove(self._session, self._host, self._interceptor) # type: ignore + + @property + def analyze_org_policies(self) -> Callable[ + [asset_service.AnalyzeOrgPoliciesRequest], + asset_service.AnalyzeOrgPoliciesResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._AnalyzeOrgPolicies(self._session, self._host, self._interceptor) # type: ignore + + @property + def analyze_org_policy_governed_assets(self) -> Callable[ + [asset_service.AnalyzeOrgPolicyGovernedAssetsRequest], + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._AnalyzeOrgPolicyGovernedAssets(self._session, self._host, self._interceptor) # type: ignore + + @property + def analyze_org_policy_governed_containers(self) -> Callable[ + [asset_service.AnalyzeOrgPolicyGovernedContainersRequest], + asset_service.AnalyzeOrgPolicyGovernedContainersResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
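_UpdateSavedQuery pairs the PATCH verb with the saved_query body and treats updateMask as a required query parameter, defaulting it to an empty mask when unset. Through the published client surface this corresponds to roughly the following, with hypothetical names:

    from google.cloud import asset_v1
    from google.protobuf import field_mask_pb2

    client = asset_v1.AssetServiceClient(transport='rest')
    updated = client.update_saved_query(
        saved_query=asset_v1.SavedQuery(
            name='projects/my-project/savedQueries/my-query',
            description='refreshed description',
        ),
        # Only masked fields are written; unlisted fields keep their values.
        update_mask=field_mask_pb2.FieldMask(paths=['description']),
    )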
+ # In C++ this would require a dynamic_cast + return self._AnalyzeOrgPolicyGovernedContainers(self._session, self._host, self._interceptor) # type: ignore + @property def batch_get_assets_history(self) -> Callable[ [asset_service.BatchGetAssetsHistoryRequest], @@ -1468,6 +2654,14 @@ def batch_get_assets_history(self) -> Callable[ # In C++ this would require a dynamic_cast return self._BatchGetAssetsHistory(self._session, self._host, self._interceptor) # type: ignore + @property + def batch_get_effective_iam_policies(self) -> Callable[ + [asset_service.BatchGetEffectiveIamPoliciesRequest], + asset_service.BatchGetEffectiveIamPoliciesResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._BatchGetEffectiveIamPolicies(self._session, self._host, self._interceptor) # type: ignore + @property def create_feed(self) -> Callable[ [asset_service.CreateFeedRequest], @@ -1476,6 +2670,14 @@ def create_feed(self) -> Callable[ # In C++ this would require a dynamic_cast return self._CreateFeed(self._session, self._host, self._interceptor) # type: ignore + @property + def create_saved_query(self) -> Callable[ + [asset_service.CreateSavedQueryRequest], + asset_service.SavedQuery]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateSavedQuery(self._session, self._host, self._interceptor) # type: ignore + @property def delete_feed(self) -> Callable[ [asset_service.DeleteFeedRequest], @@ -1484,6 +2686,14 @@ def delete_feed(self) -> Callable[ # In C++ this would require a dynamic_cast return self._DeleteFeed(self._session, self._host, self._interceptor) # type: ignore + @property + def delete_saved_query(self) -> Callable[ + [asset_service.DeleteSavedQueryRequest], + empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteSavedQuery(self._session, self._host, self._interceptor) # type: ignore + @property def export_assets(self) -> Callable[ [asset_service.ExportAssetsRequest], @@ -1500,6 +2710,14 @@ def get_feed(self) -> Callable[ # In C++ this would require a dynamic_cast return self._GetFeed(self._session, self._host, self._interceptor) # type: ignore + @property + def get_saved_query(self) -> Callable[ + [asset_service.GetSavedQueryRequest], + asset_service.SavedQuery]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetSavedQuery(self._session, self._host, self._interceptor) # type: ignore + @property def list_assets(self) -> Callable[ [asset_service.ListAssetsRequest], @@ -1516,6 +2734,22 @@ def list_feeds(self) -> Callable[ # In C++ this would require a dynamic_cast return self._ListFeeds(self._session, self._host, self._interceptor) # type: ignore + @property + def list_saved_queries(self) -> Callable[ + [asset_service.ListSavedQueriesRequest], + asset_service.ListSavedQueriesResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ListSavedQueries(self._session, self._host, self._interceptor) # type: ignore + + @property + def query_assets(self) -> Callable[ + [asset_service.QueryAssetsRequest], + asset_service.QueryAssetsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._QueryAssets(self._session, self._host, self._interceptor) # type: ignore + @property def search_all_iam_policies(self) -> Callable[ [asset_service.SearchAllIamPoliciesRequest], @@ -1540,6 +2774,79 @@ def update_feed(self) -> Callable[ # In C++ this would require a dynamic_cast return self._UpdateFeed(self._session, self._host, self._interceptor) # type: ignore + @property + def update_saved_query(self) -> Callable[ + [asset_service.UpdateSavedQueryRequest], + asset_service.SavedQuery]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateSavedQuery(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(AssetServiceRestStub): + def __call__(self, + request: operations_pb2.GetOperationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> operations_pb2.Operation: + + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.Operation: Response from GetOperation method. + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=*/*/operations/*/**}', + }, + ] + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
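_GetOperation is what lets a caller poll the google.longrunning operations returned by export_assets and analyze_iam_policy_longrunning, using the same '/v1/{name=*/*/operations/*/**}' mapping registered on the operations client earlier in this file. A polling sketch with a hypothetical operation name:

    from google.cloud import asset_v1
    from google.longrunning import operations_pb2

    client = asset_v1.AssetServiceClient(transport='rest')
    op = client.get_operation(operations_pb2.GetOperationRequest(
        name='projects/my-project/operations/ExportAssets/12345',
    ))
    if op.done:
        print('operation finished')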
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.Operation() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_operation(resp) + return resp + @property def kind(self) -> str: return "rest" diff --git a/tests/integration/goldens/asset/google/cloud/asset_v1/types/__init__.py b/tests/integration/goldens/asset/google/cloud/asset_v1/types/__init__.py index 893bf91636..24f59fb0cc 100755 --- a/tests/integration/goldens/asset/google/cloud/asset_v1/types/__init__.py +++ b/tests/integration/goldens/asset/google/cloud/asset_v1/types/__init__.py @@ -14,15 +14,30 @@ # limitations under the License. # from .asset_service import ( + AnalyzeIamPolicyLongrunningMetadata, AnalyzeIamPolicyLongrunningRequest, AnalyzeIamPolicyLongrunningResponse, AnalyzeIamPolicyRequest, AnalyzeIamPolicyResponse, + AnalyzeMoveRequest, + AnalyzeMoveResponse, + AnalyzeOrgPoliciesRequest, + AnalyzeOrgPoliciesResponse, + AnalyzeOrgPolicyGovernedAssetsRequest, + AnalyzeOrgPolicyGovernedAssetsResponse, + AnalyzeOrgPolicyGovernedContainersRequest, + AnalyzeOrgPolicyGovernedContainersResponse, + AnalyzerOrgPolicy, + AnalyzerOrgPolicyConstraint, BatchGetAssetsHistoryRequest, BatchGetAssetsHistoryResponse, + BatchGetEffectiveIamPoliciesRequest, + BatchGetEffectiveIamPoliciesResponse, BigQueryDestination, CreateFeedRequest, + CreateSavedQueryRequest, DeleteFeedRequest, + DeleteSavedQueryRequest, ExportAssetsRequest, ExportAssetsResponse, Feed, @@ -30,45 +45,81 @@ GcsDestination, GcsOutputResult, GetFeedRequest, + GetSavedQueryRequest, IamPolicyAnalysisOutputConfig, IamPolicyAnalysisQuery, ListAssetsRequest, ListAssetsResponse, ListFeedsRequest, ListFeedsResponse, + ListSavedQueriesRequest, + ListSavedQueriesResponse, + MoveAnalysis, + MoveAnalysisResult, + MoveImpact, OutputConfig, OutputResult, PartitionSpec, PubsubDestination, + QueryAssetsOutputConfig, + QueryAssetsRequest, + QueryAssetsResponse, + QueryResult, + SavedQuery, SearchAllIamPoliciesRequest, SearchAllIamPoliciesResponse, SearchAllResourcesRequest, SearchAllResourcesResponse, + TableFieldSchema, + TableSchema, UpdateFeedRequest, + UpdateSavedQueryRequest, ContentType, ) from .assets import ( Asset, + AttachedResource, ConditionEvaluation, IamPolicyAnalysisResult, IamPolicyAnalysisState, IamPolicySearchResult, + RelatedAsset, + RelatedAssets, + RelatedResource, + RelatedResources, + RelationshipAttributes, Resource, ResourceSearchResult, TemporalAsset, TimeWindow, + VersionedResource, ) __all__ = ( + 'AnalyzeIamPolicyLongrunningMetadata', 'AnalyzeIamPolicyLongrunningRequest', 'AnalyzeIamPolicyLongrunningResponse', 'AnalyzeIamPolicyRequest', 'AnalyzeIamPolicyResponse', + 'AnalyzeMoveRequest', + 'AnalyzeMoveResponse', + 'AnalyzeOrgPoliciesRequest', + 'AnalyzeOrgPoliciesResponse', + 'AnalyzeOrgPolicyGovernedAssetsRequest', + 'AnalyzeOrgPolicyGovernedAssetsResponse', + 'AnalyzeOrgPolicyGovernedContainersRequest', + 'AnalyzeOrgPolicyGovernedContainersResponse', + 'AnalyzerOrgPolicy', + 'AnalyzerOrgPolicyConstraint', 'BatchGetAssetsHistoryRequest', 'BatchGetAssetsHistoryResponse', + 'BatchGetEffectiveIamPoliciesRequest', + 'BatchGetEffectiveIamPoliciesResponse', 'BigQueryDestination', 'CreateFeedRequest', + 'CreateSavedQueryRequest', 'DeleteFeedRequest', + 'DeleteSavedQueryRequest', 'ExportAssetsRequest', 'ExportAssetsResponse', 'Feed', @@ -76,29 +127,50 @@ 'GcsDestination', 'GcsOutputResult', 'GetFeedRequest', + 'GetSavedQueryRequest', 
'IamPolicyAnalysisOutputConfig', 'IamPolicyAnalysisQuery', 'ListAssetsRequest', 'ListAssetsResponse', 'ListFeedsRequest', 'ListFeedsResponse', + 'ListSavedQueriesRequest', + 'ListSavedQueriesResponse', + 'MoveAnalysis', + 'MoveAnalysisResult', + 'MoveImpact', 'OutputConfig', 'OutputResult', 'PartitionSpec', 'PubsubDestination', + 'QueryAssetsOutputConfig', + 'QueryAssetsRequest', + 'QueryAssetsResponse', + 'QueryResult', + 'SavedQuery', 'SearchAllIamPoliciesRequest', 'SearchAllIamPoliciesResponse', 'SearchAllResourcesRequest', 'SearchAllResourcesResponse', + 'TableFieldSchema', + 'TableSchema', 'UpdateFeedRequest', + 'UpdateSavedQueryRequest', 'ContentType', 'Asset', + 'AttachedResource', 'ConditionEvaluation', 'IamPolicyAnalysisResult', 'IamPolicyAnalysisState', 'IamPolicySearchResult', + 'RelatedAsset', + 'RelatedAssets', + 'RelatedResource', + 'RelatedResources', + 'RelationshipAttributes', 'Resource', 'ResourceSearchResult', 'TemporalAsset', 'TimeWindow', + 'VersionedResource', ) diff --git a/tests/integration/goldens/asset/google/cloud/asset_v1/types/asset_service.py b/tests/integration/goldens/asset/google/cloud/asset_v1/types/asset_service.py index af29c724b9..00ef45dfce 100755 --- a/tests/integration/goldens/asset/google/cloud/asset_v1/types/asset_service.py +++ b/tests/integration/goldens/asset/google/cloud/asset_v1/types/asset_service.py @@ -20,9 +20,12 @@ import proto # type: ignore from google.cloud.asset_v1.types import assets as gca_assets +from google.iam.v1 import policy_pb2 # type: ignore from google.protobuf import duration_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import struct_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore from google.type import expr_pb2 # type: ignore @@ -30,6 +33,7 @@ package='google.cloud.asset.v1', manifest={ 'ContentType', + 'AnalyzeIamPolicyLongrunningMetadata', 'ExportAssetsRequest', 'ExportAssetsResponse', 'ListAssetsRequest', @@ -61,6 +65,34 @@ 'IamPolicyAnalysisOutputConfig', 'AnalyzeIamPolicyLongrunningRequest', 'AnalyzeIamPolicyLongrunningResponse', + 'SavedQuery', + 'CreateSavedQueryRequest', + 'GetSavedQueryRequest', + 'ListSavedQueriesRequest', + 'ListSavedQueriesResponse', + 'UpdateSavedQueryRequest', + 'DeleteSavedQueryRequest', + 'AnalyzeMoveRequest', + 'AnalyzeMoveResponse', + 'MoveAnalysis', + 'MoveAnalysisResult', + 'MoveImpact', + 'QueryAssetsOutputConfig', + 'QueryAssetsRequest', + 'QueryAssetsResponse', + 'QueryResult', + 'TableSchema', + 'TableFieldSchema', + 'BatchGetEffectiveIamPoliciesRequest', + 'BatchGetEffectiveIamPoliciesResponse', + 'AnalyzerOrgPolicy', + 'AnalyzerOrgPolicyConstraint', + 'AnalyzeOrgPoliciesRequest', + 'AnalyzeOrgPoliciesResponse', + 'AnalyzeOrgPolicyGovernedContainersRequest', + 'AnalyzeOrgPolicyGovernedContainersResponse', + 'AnalyzeOrgPolicyGovernedAssetsRequest', + 'AnalyzeOrgPolicyGovernedAssetsResponse', }, ) @@ -76,13 +108,14 @@ class ContentType(proto.Enum): IAM_POLICY (2): The actual IAM policy set on a resource. ORG_POLICY (4): - The Cloud Organization Policy set on an - asset. + The organization policy set on an asset. ACCESS_POLICY (5): - The Cloud Access context manager Policy set - on an asset. + The Access Context Manager policy set on an + asset. OS_INVENTORY (6): The runtime OS Inventory information. + RELATIONSHIP (7): + The related resources. 
""" CONTENT_TYPE_UNSPECIFIED = 0 RESOURCE = 1 @@ -90,6 +123,24 @@ class ContentType(proto.Enum): ORG_POLICY = 4 ACCESS_POLICY = 5 OS_INVENTORY = 6 + RELATIONSHIP = 7 + + +class AnalyzeIamPolicyLongrunningMetadata(proto.Message): + r"""Represents the metadata of the longrunning operation for the + AnalyzeIamPolicyLongrunning RPC. + + Attributes: + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time the operation was + created. + """ + + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) class ExportAssetsRequest(proto.Message): @@ -141,6 +192,23 @@ class ExportAssetsRequest(proto.Message): output_config (google.cloud.asset_v1.types.OutputConfig): Required. Output configuration indicating where the results will be output to. + relationship_types (MutableSequence[str]): + A list of relationship types to export, for example: + ``INSTANCE_TO_INSTANCEGROUP``. This field should only be + specified if content_type=RELATIONSHIP. + + - If specified: it snapshots specified relationships. It + returns an error if any of the [relationship_types] + doesn't belong to the supported relationship types of the + [asset_types] or if any of the [asset_types] doesn't + belong to the source types of the [relationship_types]. + - Otherwise: it snapshots the supported relationships for + all [asset_types] or returns an error if any of the + [asset_types] has no relationship support. An unspecified + asset types field means all supported asset_types. See + `Introduction to Cloud Asset + Inventory `__ + for all supported asset types and relationship types. """ parent: str = proto.Field( @@ -166,6 +234,10 @@ class ExportAssetsRequest(proto.Message): number=5, message='OutputConfig', ) + relationship_types: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=6, + ) class ExportAssetsResponse(proto.Message): @@ -183,11 +255,11 @@ class ExportAssetsResponse(proto.Message): results were output to. output_result (google.cloud.asset_v1.types.OutputResult): Output result indicating where the assets were exported to. - For example, a set of actual Google Cloud Storage object - uris where the assets are exported to. The uris can be - different from what [output_config] has specified, as the - service will split the output object into multiple ones once - it exceeds a single Google Cloud Storage object limit. + For example, a set of actual Cloud Storage object URIs where + the assets are exported to. The URIs can be different from + what [output_config] has specified, as the service will + split the output object into multiple ones once it exceeds a + single Cloud Storage object limit. """ read_time: timestamp_pb2.Timestamp = proto.Field( @@ -212,11 +284,13 @@ class ListAssetsRequest(proto.Message): Attributes: parent (str): - Required. Name of the organization or project the assets - belong to. Format: "organizations/[organization-number]" - (such as "organizations/123"), "projects/[project-id]" (such - as "projects/my-project-id"), or "projects/[project-number]" - (such as "projects/12345"). + Required. Name of the organization, folder, or project the + assets belong to. Format: + "organizations/[organization-number]" (such as + "organizations/123"), "projects/[project-id]" (such as + "projects/my-project-id"), "projects/[project-number]" (such + as "projects/12345"), or "folders/[folder-number]" (such as + "folders/12345"). read_time (google.protobuf.timestamp_pb2.Timestamp): Timestamp to take an asset snapshot. 
This can only be set to a timestamp between the current @@ -262,6 +336,23 @@ class ListAssetsRequest(proto.Message): ``ListAssetsRequest``. It is a continuation of a prior ``ListAssets`` call, and the API should return the next page of assets. + relationship_types (MutableSequence[str]): + A list of relationship types to output, for example: + ``INSTANCE_TO_INSTANCEGROUP``. This field should only be + specified if content_type=RELATIONSHIP. + + - If specified: it snapshots specified relationships. It + returns an error if any of the [relationship_types] + doesn't belong to the supported relationship types of the + [asset_types] or if any of the [asset_types] doesn't + belong to the source types of the [relationship_types]. + - Otherwise: it snapshots the supported relationships for + all [asset_types] or returns an error if any of the + [asset_types] has no relationship support. An unspecified + asset types field means all supported asset_types. See + `Introduction to Cloud Asset + Inventory `__ + for all supported asset types and relationship types. """ parent: str = proto.Field( @@ -290,6 +381,10 @@ class ListAssetsRequest(proto.Message): proto.STRING, number=6, ) + relationship_types: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=7, + ) class ListAssetsResponse(proto.Message): @@ -357,6 +452,23 @@ class BatchGetAssetsHistoryRequest(proto.Message): not set, the snapshot of the assets at end_time will be returned. The returned results contain all temporal assets whose time window overlap with read_time_window. + relationship_types (MutableSequence[str]): + Optional. A list of relationship types to output, for + example: ``INSTANCE_TO_INSTANCEGROUP``. This field should + only be specified if content_type=RELATIONSHIP. + + - If specified: it outputs specified relationships' history + on the [asset_names]. It returns an error if any of the + [relationship_types] doesn't belong to the supported + relationship types of the [asset_names] or if any of the + [asset_names]'s types doesn't belong to the source types + of the [relationship_types]. + - Otherwise: it outputs the supported relationships' + history on the [asset_names] or returns an error if any + of the [asset_names]'s types has no relationship support. + See `Introduction to Cloud Asset + Inventory `__ + for all supported asset types and relationship types. """ parent: str = proto.Field( @@ -377,6 +489,10 @@ class BatchGetAssetsHistoryRequest(proto.Message): number=4, message=gca_assets.TimeWindow, ) + relationship_types: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=5, + ) class BatchGetAssetsHistoryResponse(proto.Message): @@ -405,7 +521,7 @@ class CreateFeedRequest(proto.Message): organization number (such as "organizations/123"), a folder number (such as "folders/123"), a project ID (such as - "projects/my-project-id")", or a project number + "projects/my-project-id"), or a project number (such as "projects/12345"). feed_id (str): Required. This is the client-assigned asset @@ -547,8 +663,8 @@ class OutputConfig(proto.Message): This field is a member of `oneof`_ ``destination``. bigquery_destination (google.cloud.asset_v1.types.BigQueryDestination): Destination on BigQuery. The output table - stores the fields in asset proto as columns in - BigQuery. + stores the fields in asset Protobuf as columns + in BigQuery. This field is a member of `oneof`_ ``destination``. 
""" @@ -592,7 +708,7 @@ class GcsOutputResult(proto.Message): Attributes: uris (MutableSequence[str]): - List of uris of the Cloud Storage objects. Example: + List of URIs of the Cloud Storage objects. Example: "gs://bucket_name/object_name". """ @@ -614,7 +730,7 @@ class GcsDestination(proto.Message): Attributes: uri (str): - The uri of the Cloud Storage object. It's the same uri that + The URI of the Cloud Storage object. It's the same URI that is used by gsutil. Example: "gs://bucket_name/object_name". See `Viewing and Editing Object Metadata `__ @@ -627,9 +743,9 @@ class GcsDestination(proto.Message): This field is a member of `oneof`_ ``object_uri``. uri_prefix (str): - The uri prefix of all generated Cloud Storage objects. + The URI prefix of all generated Cloud Storage objects. Example: "gs://bucket_name/object_name_prefix". Each object - uri is in format: "gs://bucket_name/object_name_prefix// and + URI is in format: "gs://bucket_name/object_name_prefix// and only contains assets for that type. starts from 0. Example: "gs://bucket_name/object_name_prefix/compute.googleapis.com/Disk/0" is the first shard of output objects containing all @@ -661,6 +777,12 @@ class BigQueryDestination(proto.Message): "projects/projectId/datasets/datasetId", to which the snapshot result should be exported. If this dataset does not exist, the export call returns an INVALID_ARGUMENT error. + Setting the ``contentType`` for ``exportAssets`` determines + the + `schema `__ + of the BigQuery table. Setting + ``separateTablesPerAssetType`` to ``TRUE`` also influences + the schema. table (str): Required. The BigQuery table to which the snapshot result should be written. If this table @@ -860,9 +982,9 @@ class Feed(proto.Message): asset_names or asset_types are exported to the feed. Example: ``//compute.googleapis.com/projects/my_project_123/zones/zone1/instances/instance1``. - See `Resource - Names `__ - for more info. + For a list of the full names for supported asset types, see + `Resource name + format `__. asset_types (MutableSequence[str]): A list of types of the assets to receive updates. You must specify either or both of asset_names and asset_types. Only @@ -870,9 +992,9 @@ class Feed(proto.Message): are exported to the feed. Example: ``"compute.googleapis.com/Disk"`` - See `this - topic `__ - for a list of all supported asset types. + For a list of all supported asset types, see `Supported + asset + types `__. content_type (google.cloud.asset_v1.types.ContentType): Asset content type. If not specified, no content but the asset name and type will be @@ -892,8 +1014,27 @@ class Feed(proto.Message): optional. See our `user - guide `__ + guide `__ for detailed instructions. + relationship_types (MutableSequence[str]): + A list of relationship types to output, for example: + ``INSTANCE_TO_INSTANCEGROUP``. This field should only be + specified if content_type=RELATIONSHIP. + + - If specified: it outputs specified relationship updates + on the [asset_names] or the [asset_types]. It returns an + error if any of the [relationship_types] doesn't belong + to the supported relationship types of the [asset_names] + or [asset_types], or any of the [asset_names] or the + [asset_types] doesn't belong to the source types of the + [relationship_types]. + - Otherwise: it outputs the supported relationships of the + types of [asset_names] and [asset_types] or returns an + error if any of the [asset_names] or the [asset_types] + has no replationship support. 
See `Introduction to Cloud + Asset + Inventory `__ + for all supported asset types and relationship types. """ name: str = proto.Field( @@ -923,6 +1064,10 @@ class Feed(proto.Message): number=6, message=expr_pb2.Expr, ) + relationship_types: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=7, + ) class SearchAllResourcesRequest(proto.Message): @@ -951,44 +1096,61 @@ class SearchAllResourcesRequest(proto.Message): Examples: - - ``name:Important`` to find Cloud resources whose name - contains "Important" as a word. - - ``name=Important`` to find the Cloud resource whose name - is exactly "Important". - - ``displayName:Impor*`` to find Cloud resources whose - display name contains "Impor" as a prefix of any word in - the field. - - ``location:us-west*`` to find Cloud resources whose - location contains both "us" and "west" as prefixes. - - ``labels:prod`` to find Cloud resources whose labels - contain "prod" as a key or value. - - ``labels.env:prod`` to find Cloud resources that have a - label "env" and its value is "prod". - - ``labels.env:*`` to find Cloud resources that have a - label "env". - - ``kmsKey:key`` to find Cloud resources encrypted with a - customer-managed encryption key whose name contains the - word "key". - - ``state:ACTIVE`` to find Cloud resources whose state - contains "ACTIVE" as a word. - - ``NOT state:ACTIVE`` to find {{gcp_name}} resources whose + - ``name:Important`` to find Google Cloud resources whose + name contains "Important" as a word. + - ``name=Important`` to find the Google Cloud resource + whose name is exactly "Important". + - ``displayName:Impor*`` to find Google Cloud resources + whose display name contains "Impor" as a prefix of any + word in the field. + - ``location:us-west*`` to find Google Cloud resources + whose location contains both "us" and "west" as prefixes. + - ``labels:prod`` to find Google Cloud resources whose + labels contain "prod" as a key or value. + - ``labels.env:prod`` to find Google Cloud resources that + have a label "env" and its value is "prod". + - ``labels.env:*`` to find Google Cloud resources that have + a label "env". + - ``kmsKey:key`` to find Google Cloud resources encrypted + with a customer-managed encryption key whose name + contains "key" as a word. This field is deprecated. + Please use the ``kmsKeys`` field to retrieve Cloud KMS + key information. + - ``kmsKeys:key`` to find Google Cloud resources encrypted + with customer-managed encryption keys whose name contains + the word "key". + - ``relationships:instance-group-1`` to find Google Cloud + resources that have relationships with "instance-group-1" + in the related resource name. + - ``relationships:INSTANCE_TO_INSTANCEGROUP`` to find + Compute Engine instances that have relationships of type + "INSTANCE_TO_INSTANCEGROUP". + - ``relationships.INSTANCE_TO_INSTANCEGROUP:instance-group-1`` + to find Compute Engine instances that have relationships + with "instance-group-1" in the Compute Engine instance + group resource name, for relationship type + "INSTANCE_TO_INSTANCEGROUP". + - ``state:ACTIVE`` to find Google Cloud resources whose + state contains "ACTIVE" as a word. + - ``NOT state:ACTIVE`` to find Google Cloud resources whose state doesn't contain "ACTIVE" as a word. - - ``createTime<1609459200`` to find Cloud resources that - were created before "2021-01-01 00:00:00 UTC". 1609459200 - is the epoch timestamp of "2021-01-01 00:00:00 UTC" in - seconds. 
- - ``updateTime>1609459200`` to find Cloud resources that - were updated after "2021-01-01 00:00:00 UTC". 1609459200 - is the epoch timestamp of "2021-01-01 00:00:00 UTC" in - seconds. - - ``Important`` to find Cloud resources that contain + - ``createTime<1609459200`` to find Google Cloud resources + that were created before "2021-01-01 00:00:00 UTC". + 1609459200 is the epoch timestamp of "2021-01-01 00:00:00 + UTC" in seconds. + - ``updateTime>1609459200`` to find Google Cloud resources + that were updated after "2021-01-01 00:00:00 UTC". + 1609459200 is the epoch timestamp of "2021-01-01 00:00:00 + UTC" in seconds. + - ``Important`` to find Google Cloud resources that contain "Important" as a word in any of the searchable fields. - - ``Impor*`` to find Cloud resources that contain "Impor" - as a prefix of any word in any of the searchable fields. - - ``Important location:(us-west1 OR global)`` to find Cloud - resources that contain "Important" as a word in any of - the searchable fields and are also located in the - "us-west1" region or the "global" location. + - ``Impor*`` to find Google Cloud resources that contain + "Impor" as a prefix of any word in any of the searchable + fields. + - ``Important location:(us-west1 OR global)`` to find + Google Cloud resources that contain "Important" as a word + in any of the searchable fields and are also located in + the "us-west1" region or the "global" location. asset_types (MutableSequence[str]): Optional. A list of asset types that this request searches for. If empty, it will search all the `searchable asset @@ -1034,15 +1196,52 @@ class SearchAllResourcesRequest(proto.Message): - displayName - description - location - - kmsKey - createTime - updateTime - state - parentFullResourceName - - parentAssetType All the other fields such as repeated - fields (e.g., ``networkTags``), map fields (e.g., - ``labels``) and struct fields (e.g., - ``additionalAttributes``) are not supported. + - parentAssetType + + All the other fields such as repeated fields (e.g., + ``networkTags``, ``kmsKeys``), map fields (e.g., ``labels``) + and struct fields (e.g., ``additionalAttributes``) are not + supported. + read_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. A comma-separated list of fields specifying which + fields to be returned in ResourceSearchResult. Only '*' or + combination of top level fields can be specified. Field + names of both snake_case and camelCase are supported. + Examples: ``"*"``, ``"name,location"``, + ``"name,versionedResources"``. + + The read_mask paths must be valid field paths listed but not + limited to (both snake_case and camelCase are supported): + + - name + - assetType + - project + - displayName + - description + - location + - tagKeys + - tagValues + - tagValueIds + - labels + - networkTags + - kmsKey (This field is deprecated. Please use the + ``kmsKeys`` field to retrieve Cloud KMS key information.) + - kmsKeys + - createTime + - updateTime + - state + - additionalAttributes + - versionedResources + + If read_mask is not specified, all fields except + versionedResources will be returned. If only '*' is + specified, all fields including versionedResources will be + returned. Any invalid field path will trigger + INVALID_ARGUMENT error. 
""" scope: str = proto.Field( @@ -1069,6 +1268,11 @@ class SearchAllResourcesRequest(proto.Message): proto.STRING, number=6, ) + read_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=8, + message=field_mask_pb2.FieldMask, + ) class SearchAllResourcesResponse(proto.Message): @@ -1124,12 +1328,12 @@ class SearchAllIamPoliciesRequest(proto.Message): query `__ for more information. If not specified or empty, it will search all the IAM policies within the specified ``scope``. - Note that the query string is compared against each Cloud - IAM policy binding, including its members, roles, and Cloud - IAM conditions. The returned Cloud IAM policies will only - contain the bindings that match your query. To learn more - about the IAM policy structure, see `IAM policy - doc `__. + Note that the query string is compared against each IAM + policy binding, including its principals, roles, and IAM + conditions. The returned IAM policies will only contain the + bindings that match your query. To learn more about the IAM + policy structure, see the `IAM policy + documentation `__. Examples: @@ -1165,7 +1369,7 @@ class SearchAllIamPoliciesRequest(proto.Message): - ``roles:roles/compute.admin`` to find IAM policy bindings that specify the Compute Admin role. - ``memberTypes:user`` to find IAM policy bindings that - contain the "user" member type. + contain the principal type "user". page_size (int): Optional. The page size for search result pagination. Page size is capped at 500 even if a larger value is given. If @@ -1245,7 +1449,7 @@ class SearchAllIamPoliciesResponse(proto.Message): Attributes: results (MutableSequence[google.cloud.asset_v1.types.IamPolicySearchResult]): - A list of IamPolicy that match the search + A list of IAM policies that match the search query. Related information such as the associated resource is returned along with the policy. @@ -1271,7 +1475,7 @@ def raw_page(self): class IamPolicyAnalysisQuery(proto.Message): - r"""## IAM policy analysis query message. + r"""IAM policy analysis query message. Attributes: scope (str): @@ -1329,8 +1533,8 @@ class IdentitySelector(proto.Message): Attributes: identity (str): - Required. The identity appear in the form of members in `IAM - policy + Required. The identity appear in the form of principals in + `IAM policy binding `__. The examples of supported forms are: @@ -1384,6 +1588,9 @@ class Options(proto.Message): is specified, the identity in the result will be determined by the selector, and this flag is not allowed to set. + If true, the default max expansion per group is 1000 for + AssetService.AnalyzeIamPolicy][]. + Default is false. expand_roles (bool): Optional. If true, the access section of result will expand @@ -1406,33 +1613,36 @@ class Options(proto.Message): For example, if the request analyzes for which resources user A has permission P, and the results include an IAM - policy with P on a GCP folder, the results will also include - resources in that folder with permission P. + policy with P on a Google Cloud folder, the results will + also include resources in that folder with permission P. If true and [IamPolicyAnalysisQuery.resource_selector][google.cloud.asset.v1.IamPolicyAnalysisQuery.resource_selector] is specified, the resource section of the result will expand the specified resource to include resources lower in the resource hierarchy. Only project or lower resources are - supported. Folder and organization resource cannot be used + supported. 
Folder and organization resources cannot be used + together with this option. For example, if the request analyzes for which users have - permission P on a GCP project with this option enabled, the - results will include all users who have permission P on that - project or any lower resource. + permission P on a Google Cloud project with this option + enabled, the results will include all users who have + permission P on that project or any lower resource. + + If true, the default max expansion per resource is 1000 for + [AssetService.AnalyzeIamPolicy][] and 100000 for + [AssetService.AnalyzeIamPolicyLongrunning][]. Default is false. output_resource_edges (bool): - Optional. If true, the result will output - resource edges, starting from the policy - attached resource, to any expanded resources. - Default is false. + Optional. If true, the result will output the + relevant parent/child relationships between + resources. Default is false. output_group_edges (bool): - Optional. If true, the result will output - group identity edges, starting from the - binding's group members, to any expanded - identities. Default is false. + Optional. If true, the result will output the + relevant membership relationships between groups + and other groups, and between groups and + principals. Default is false. analyze_service_account_impersonation (bool): Optional. If true, the response will include access analysis from identities to resources via service account @@ -1440,26 +1650,38 @@ class Options(proto.Message): many derived queries will be executed. We highly recommend you use [AssetService.AnalyzeIamPolicyLongrunning][google.cloud.asset.v1.AssetService.AnalyzeIamPolicyLongrunning] - rpc instead. + RPC instead. For example, if the request analyzes for which resources user A has permission P, and there's an IAM policy states user A has iam.serviceAccounts.getAccessToken permission to a service account SA, and there's another IAM policy states - service account SA has permission P to a GCP folder F, then - user A potentially has access to the GCP folder F. And those - advanced analysis results will be included in + service account SA has permission P to a Google Cloud folder + F, then user A potentially has access to the Google Cloud + folder F. And those advanced analysis results will be + included in [AnalyzeIamPolicyResponse.service_account_impersonation_analysis][google.cloud.asset.v1.AnalyzeIamPolicyResponse.service_account_impersonation_analysis]. Another example, if the request analyzes for who has - permission P to a GCP folder F, and there's an IAM policy - states user A has iam.serviceAccounts.actAs permission to a - service account SA, and there's another IAM policy states - service account SA has permission P to the GCP folder F, - then user A potentially has access to the GCP folder F. And - those advanced analysis results will be included in + permission P to a Google Cloud folder F, and there's an IAM + policy states user A has iam.serviceAccounts.actAs + permission to a service account SA, and there's another IAM + policy states service account SA has permission P to the + Google Cloud folder F, then user A potentially has access to + the Google Cloud folder F. And those advanced analysis + results will be included in + [AnalyzeIamPolicyResponse.service_account_impersonation_analysis][google.cloud.asset.v1.AnalyzeIamPolicyResponse.service_account_impersonation_analysis].
+ Only the following permissions are considered in this + analysis: + + - ``iam.serviceAccounts.actAs`` + - ``iam.serviceAccounts.signBlob`` + - ``iam.serviceAccounts.signJwt`` + - ``iam.serviceAccounts.getAccessToken`` + - ``iam.serviceAccounts.getOpenIdToken`` + - ``iam.serviceAccounts.implicitDelegation`` + Default is false. """ @@ -1548,6 +1770,25 @@ class AnalyzeIamPolicyRequest(proto.Message): Attributes: analysis_query (google.cloud.asset_v1.types.IamPolicyAnalysisQuery): Required. The request query. + saved_analysis_query (str): + Optional. The name of a saved query, which must be in the + format of: + + - projects/project_number/savedQueries/saved_query_id + - folders/folder_number/savedQueries/saved_query_id + - organizations/organization_number/savedQueries/saved_query_id + + If both ``analysis_query`` and ``saved_analysis_query`` are + provided, they will be merged together with the + ``saved_analysis_query`` as base and the ``analysis_query`` + as overrides. For more details of the merge behavior, please + refer to the + `MergeFrom `__ + page. + + Note that you cannot override primitive fields with default + value, such as 0 or empty string, etc., because we use + proto3, which doesn't support field presence yet. execution_timeout (google.protobuf.duration_pb2.Duration): Optional. Amount of time executable has to complete. See JSON representation of @@ -1568,6 +1809,10 @@ class AnalyzeIamPolicyRequest(proto.Message): number=1, message='IamPolicyAnalysisQuery', ) + saved_analysis_query: str = proto.Field( + proto.STRING, + number=3, + ) execution_timeout: duration_pb2.Duration = proto.Field( proto.MESSAGE, number=2, @@ -1678,8 +1923,8 @@ class GcsDestination(proto.Message): Attributes: uri (str): - Required. The uri of the Cloud Storage object. It's the same - uri that is used by gsutil. Example: + Required. The URI of the Cloud Storage object. It's the same + URI that is used by gsutil. Example: "gs://bucket_name/object_name". See `Viewing and Editing Object Metadata `__ @@ -1799,6 +2044,25 @@ class AnalyzeIamPolicyLongrunningRequest(proto.Message): Attributes: analysis_query (google.cloud.asset_v1.types.IamPolicyAnalysisQuery): Required. The request query. + saved_analysis_query (str): + Optional. The name of a saved query, which must be in the + format of: + + - projects/project_number/savedQueries/saved_query_id + - folders/folder_number/savedQueries/saved_query_id + - organizations/organization_number/savedQueries/saved_query_id + + If both ``analysis_query`` and ``saved_analysis_query`` are + provided, they will be merged together with the + ``saved_analysis_query`` as base and the ``analysis_query`` + as overrides. For more details of the merge behavior, please + refer to the + `MergeFrom `__ + doc. + + Note that you cannot override primitive fields with default + value, such as 0 or empty string, etc., because we use + proto3, which doesn't support field presence yet. output_config (google.cloud.asset_v1.types.IamPolicyAnalysisOutputConfig): Required. Output configuration indicating where the results will be output to. 
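A minimal usage sketch of the long-running analysis request documented in the added docstrings above, assuming the generated ``AssetServiceClient`` surface of ``google-cloud-asset``; the project, saved-query, and bucket names are placeholders:

```python
# Sketch only: exercises AnalyzeIamPolicyLongrunningRequest as documented
# above. All resource names below are hypothetical placeholders.
from google.cloud import asset_v1

client = asset_v1.AssetServiceClient()

request = asset_v1.AnalyzeIamPolicyLongrunningRequest(
    # Overrides merged on top of the saved query, per the merge
    # semantics described in the docstring.
    analysis_query=asset_v1.IamPolicyAnalysisQuery(
        scope="projects/my-project-id",
    ),
    # Base query for the merge (hypothetical saved query name).
    saved_analysis_query="projects/123/savedQueries/my-saved-query",
    output_config=asset_v1.IamPolicyAnalysisOutputConfig(
        gcs_destination=asset_v1.IamPolicyAnalysisOutputConfig.GcsDestination(
            uri="gs://my-bucket/analysis-result",
        ),
    ),
)

operation = client.analyze_iam_policy_longrunning(request=request)
response = operation.result()  # blocks until the long-running analysis finishes
```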
@@ -1809,6 +2073,10 @@ class AnalyzeIamPolicyLongrunningRequest(proto.Message): number=1, message='IamPolicyAnalysisQuery', ) + saved_analysis_query: str = proto.Field( + proto.STRING, + number=3, + ) output_config: 'IamPolicyAnalysisOutputConfig' = proto.Field( proto.MESSAGE, number=2, @@ -1823,4 +2091,1921 @@ class AnalyzeIamPolicyLongrunningResponse(proto.Message): """ +class SavedQuery(proto.Message): + r"""A saved query which can be shared with others or used later. + + Attributes: + name (str): + The resource name of the saved query. The format must be: + + - projects/project_number/savedQueries/saved_query_id + - folders/folder_number/savedQueries/saved_query_id + - organizations/organization_number/savedQueries/saved_query_id + description (str): + The description of this saved query. This + value should be fewer than 255 characters. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The create time of this saved + query. + creator (str): + Output only. The account's email address who + has created this saved query. + last_update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The last update time of this + saved query. + last_updater (str): + Output only. The account's email address who + has updated this saved query most recently. + labels (MutableMapping[str, str]): + Labels applied on the resource. + This value should not contain more than 10 + entries. The key and value of each entry must be + non-empty and fewer than 64 characters. + content (google.cloud.asset_v1.types.SavedQuery.QueryContent): + The query content. + """ + + class QueryContent(proto.Message): + r"""The query content. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + iam_policy_analysis_query (google.cloud.asset_v1.types.IamPolicyAnalysisQuery): + An IAM Policy Analysis query, which could be used in the + [AssetService.AnalyzeIamPolicy][google.cloud.asset.v1.AssetService.AnalyzeIamPolicy] + RPC or the + [AssetService.AnalyzeIamPolicyLongrunning][google.cloud.asset.v1.AssetService.AnalyzeIamPolicyLongrunning] + RPC. + + This field is a member of `oneof`_ ``query_content``. + """ + + iam_policy_analysis_query: 'IamPolicyAnalysisQuery' = proto.Field( + proto.MESSAGE, + number=1, + oneof='query_content', + message='IamPolicyAnalysisQuery', + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + description: str = proto.Field( + proto.STRING, + number=2, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + creator: str = proto.Field( + proto.STRING, + number=4, + ) + last_update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + last_updater: str = proto.Field( + proto.STRING, + number=6, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=7, + ) + content: QueryContent = proto.Field( + proto.MESSAGE, + number=8, + message=QueryContent, + ) + + +class CreateSavedQueryRequest(proto.Message): + r"""Request to create a saved query. + + Attributes: + parent (str): + Required. The name of the project/folder/organization where + this saved_query should be created in. It can only be an + organization number (such as "organizations/123"), a folder + number (such as "folders/123"), a project ID (such as + "projects/my-project-id"), or a project number (such as + "projects/12345"). 
+ saved_query (google.cloud.asset_v1.types.SavedQuery): + Required. The saved_query details. The ``name`` field must + be empty as it will be generated based on the parent and + saved_query_id. + saved_query_id (str): + Required. The ID to use for the saved query, which must be + unique in the specified parent. It will become the final + component of the saved query's resource name. + + This value should be 4-63 characters, and valid characters + are ``[a-z][0-9]-``. + + Notice that this field is required in the saved query + creation, and the ``name`` field of the ``saved_query`` will + be ignored. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + saved_query: 'SavedQuery' = proto.Field( + proto.MESSAGE, + number=2, + message='SavedQuery', + ) + saved_query_id: str = proto.Field( + proto.STRING, + number=3, + ) + + +class GetSavedQueryRequest(proto.Message): + r"""Request to get a saved query. + + Attributes: + name (str): + Required. The name of the saved query. It must be in the + format of: + + - projects/project_number/savedQueries/saved_query_id + - folders/folder_number/savedQueries/saved_query_id + - organizations/organization_number/savedQueries/saved_query_id + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListSavedQueriesRequest(proto.Message): + r"""Request to list saved queries. + + Attributes: + parent (str): + Required. The parent + project/folder/organization whose savedQueries + are to be listed. It can only be a + project/folder/organization number (such as + "folders/12345"), or a project ID (such as + "projects/my-project-id"). + filter (str): + Optional. The expression to filter resources. The expression + is a list of zero or more restrictions combined via logical + operators ``AND`` and ``OR``. When ``AND`` and ``OR`` are + both used in the expression, parentheses must be + appropriately used to group the combinations. The expression + may also contain regular expressions. + + See https://google.aip.dev/160 for more information on the + grammar. + page_size (int): + Optional. The maximum number of saved queries + to return per page. The service may return fewer + than this value. If unspecified, at most 50 will + be returned. The maximum value is 1000; values + above 1000 will be coerced to 1000. + page_token (str): + Optional. A page token, received from a previous + ``ListSavedQueries`` call. Provide this to retrieve the + subsequent page. + + When paginating, all other parameters provided to + ``ListSavedQueries`` must match the call that provided the + page token. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListSavedQueriesResponse(proto.Message): + r"""Response of listing saved queries. + + Attributes: + saved_queries (MutableSequence[google.cloud.asset_v1.types.SavedQuery]): + A list of savedQueries. + next_page_token (str): + A token, which can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages.
+ """ + + @property + def raw_page(self): + return self + + saved_queries: MutableSequence['SavedQuery'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='SavedQuery', + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class UpdateSavedQueryRequest(proto.Message): + r"""Request to update a saved query. + + Attributes: + saved_query (google.cloud.asset_v1.types.SavedQuery): + Required. The saved query to update. + + The saved query's ``name`` field is used to identify the one + to update, which has format as below: + + - projects/project_number/savedQueries/saved_query_id + - folders/folder_number/savedQueries/saved_query_id + - organizations/organization_number/savedQueries/saved_query_id + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. The list of fields to update. + """ + + saved_query: 'SavedQuery' = proto.Field( + proto.MESSAGE, + number=1, + message='SavedQuery', + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class DeleteSavedQueryRequest(proto.Message): + r"""Request to delete a saved query. + + Attributes: + name (str): + Required. The name of the saved query to delete. It must be + in the format of: + + - projects/project_number/savedQueries/saved_query_id + - folders/folder_number/savedQueries/saved_query_id + - organizations/organization_number/savedQueries/saved_query_id + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class AnalyzeMoveRequest(proto.Message): + r"""The request message for performing resource move analysis. + + Attributes: + resource (str): + Required. Name of the resource to perform the + analysis against. Only Google Cloud projects are + supported as of today. Hence, this can only be a + project ID (such as "projects/my-project-id") or + a project number (such as "projects/12345"). + destination_parent (str): + Required. Name of the Google Cloud folder or + organization to reparent the target resource. + The analysis will be performed against + hypothetically moving the resource to this + specified desitination parent. This can only be + a folder number (such as "folders/123") or an + organization number (such as + "organizations/123"). + view (google.cloud.asset_v1.types.AnalyzeMoveRequest.AnalysisView): + Analysis view indicating what information + should be included in the analysis response. If + unspecified, the default view is FULL. + """ + class AnalysisView(proto.Enum): + r"""View enum for supporting partial analysis responses. + + Values: + ANALYSIS_VIEW_UNSPECIFIED (0): + The default/unset value. + The API will default to the FULL view. + FULL (1): + Full analysis including all level of impacts + of the specified resource move. + BASIC (2): + Basic analysis only including blockers which + will prevent the specified resource move at + runtime. + """ + ANALYSIS_VIEW_UNSPECIFIED = 0 + FULL = 1 + BASIC = 2 + + resource: str = proto.Field( + proto.STRING, + number=1, + ) + destination_parent: str = proto.Field( + proto.STRING, + number=2, + ) + view: AnalysisView = proto.Field( + proto.ENUM, + number=3, + enum=AnalysisView, + ) + + +class AnalyzeMoveResponse(proto.Message): + r"""The response message for resource move analysis. + + Attributes: + move_analysis (MutableSequence[google.cloud.asset_v1.types.MoveAnalysis]): + The list of analyses returned from performing + the intended resource move analysis. The + analysis is grouped by different Google Cloud + services. 
+ """ + + move_analysis: MutableSequence['MoveAnalysis'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='MoveAnalysis', + ) + + +class MoveAnalysis(proto.Message): + r"""A message to group the analysis information. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + display_name (str): + The user friendly display name of the + analysis. E.g. IAM, organization policy etc. + analysis (google.cloud.asset_v1.types.MoveAnalysisResult): + Analysis result of moving the target + resource. + + This field is a member of `oneof`_ ``result``. + error (google.rpc.status_pb2.Status): + Description of error encountered when + performing the analysis. + + This field is a member of `oneof`_ ``result``. + """ + + display_name: str = proto.Field( + proto.STRING, + number=1, + ) + analysis: 'MoveAnalysisResult' = proto.Field( + proto.MESSAGE, + number=2, + oneof='result', + message='MoveAnalysisResult', + ) + error: status_pb2.Status = proto.Field( + proto.MESSAGE, + number=3, + oneof='result', + message=status_pb2.Status, + ) + + +class MoveAnalysisResult(proto.Message): + r"""An analysis result including blockers and warnings. + + Attributes: + blockers (MutableSequence[google.cloud.asset_v1.types.MoveImpact]): + Blocking information that would prevent the + target resource from moving to the specified + destination at runtime. + warnings (MutableSequence[google.cloud.asset_v1.types.MoveImpact]): + Warning information indicating that moving + the target resource to the specified destination + might be unsafe. This can include important + policy information and configuration changes, + but will not block moves at runtime. + """ + + blockers: MutableSequence['MoveImpact'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='MoveImpact', + ) + warnings: MutableSequence['MoveImpact'] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message='MoveImpact', + ) + + +class MoveImpact(proto.Message): + r"""A message to group impacts of moving the target resource. + + Attributes: + detail (str): + User friendly impact detail in a free form + message. + """ + + detail: str = proto.Field( + proto.STRING, + number=1, + ) + + +class QueryAssetsOutputConfig(proto.Message): + r"""Output configuration query assets. + + Attributes: + bigquery_destination (google.cloud.asset_v1.types.QueryAssetsOutputConfig.BigQueryDestination): + BigQuery destination where the query results + will be saved. + """ + + class BigQueryDestination(proto.Message): + r"""BigQuery destination. + + Attributes: + dataset (str): + Required. The BigQuery dataset where the + query results will be saved. It has the format + of "projects/{projectId}/datasets/{datasetId}". + table (str): + Required. The BigQuery table where the query + results will be saved. If this table does not + exist, a new table with the given name will be + created. + write_disposition (str): + Specifies the action that occurs if the destination table or + partition already exists. The following values are + supported: + + - WRITE_TRUNCATE: If the table or partition already exists, + BigQuery overwrites the entire table or all the + partitions data. 
+ - WRITE_APPEND: If the table or partition already exists, + BigQuery appends the data to the table or the latest + partition. + - WRITE_EMPTY: If the table already exists and contains + data, a 'duplicate' error is returned in the job result. + + The default value is WRITE_EMPTY. + """ + + dataset: str = proto.Field( + proto.STRING, + number=1, + ) + table: str = proto.Field( + proto.STRING, + number=2, + ) + write_disposition: str = proto.Field( + proto.STRING, + number=3, + ) + + bigquery_destination: BigQueryDestination = proto.Field( + proto.MESSAGE, + number=1, + message=BigQueryDestination, + ) + + +class QueryAssetsRequest(proto.Message): + r"""QueryAssets request. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + parent (str): + Required. The relative name of the root asset. This can only + be an organization number (such as "organizations/123"), a + project ID (such as "projects/my-project-id"), a project + number (such as "projects/12345"), or a folder number (such + as "folders/123"). + + Only assets belonging to the ``parent`` will be returned. + statement (str): + Optional. A SQL statement that's compatible with `BigQuery + Standard + SQL `__. + + This field is a member of `oneof`_ ``query``. + job_reference (str): + Optional. Reference to the query job, which is from the + ``QueryAssetsResponse`` of a previous ``QueryAssets`` call. + + This field is a member of `oneof`_ ``query``. + page_size (int): + Optional. The maximum number of rows to return in the + results. Responses are limited to 10 MB and 1000 rows. + + By default, the maximum row count is 1000. When the byte or + row count limit is reached, the rest of the query results + will be paginated. + + The field will be ignored when [output_config] is specified. + page_token (str): + Optional. A page token received from a previous + ``QueryAssets`` call. + + The field will be ignored when [output_config] is specified. + timeout (google.protobuf.duration_pb2.Duration): + Optional. Specifies the maximum amount of time that the + client is willing to wait for the query to complete. By + default, this limit is 5 minutes for the first query, and 1 + minute for the following queries. If the query is complete, + the ``done`` field in the ``QueryAssetsResponse`` is true, + otherwise false. + + As with the BigQuery `jobs.query + API `__, + the call is not guaranteed to wait for the specified + timeout; it typically returns after around 200 seconds + (200,000 milliseconds), even if the query is not complete. + + The field will be ignored when [output_config] is specified. + read_time_window (google.cloud.asset_v1.types.TimeWindow): + Optional. [start_time] is required. [start_time] must be + less than [end_time]. [end_time] defaults to now if + [start_time] is set and [end_time] isn't. Maximum permitted + time range is 7 days. + + This field is a member of `oneof`_ ``time``. + read_time (google.protobuf.timestamp_pb2.Timestamp): + Optional. Queries cloud assets as they + appeared at the specified point in time. + + This field is a member of `oneof`_ ``time``. + output_config (google.cloud.asset_v1.types.QueryAssetsOutputConfig): + Optional. Destination where the query results will be saved.
+ + When this field is specified, the query results won't be + saved in the [QueryAssetsResponse.query_result]. Instead, + [QueryAssetsResponse.output_config] will be set. + + Meanwhile, [QueryAssetsResponse.job_reference] will be set + and can be used to check the status of the query job when + passed to a following [QueryAssets] API call. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + statement: str = proto.Field( + proto.STRING, + number=2, + oneof='query', + ) + job_reference: str = proto.Field( + proto.STRING, + number=3, + oneof='query', + ) + page_size: int = proto.Field( + proto.INT32, + number=4, + ) + page_token: str = proto.Field( + proto.STRING, + number=5, + ) + timeout: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=6, + message=duration_pb2.Duration, + ) + read_time_window: gca_assets.TimeWindow = proto.Field( + proto.MESSAGE, + number=7, + oneof='time', + message=gca_assets.TimeWindow, + ) + read_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=8, + oneof='time', + message=timestamp_pb2.Timestamp, + ) + output_config: 'QueryAssetsOutputConfig' = proto.Field( + proto.MESSAGE, + number=9, + message='QueryAssetsOutputConfig', + ) + + +class QueryAssetsResponse(proto.Message): + r"""QueryAssets response. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + job_reference (str): + Reference to a query job. + done (bool): + The query response, which can be either an ``error`` or a + valid ``response``. + + If ``done`` == ``false`` and the query result is being saved + in an output, the output_config field will be set. If + ``done`` == ``true``, exactly one of ``error``, + ``query_result`` or ``output_config`` will be set. + error (google.rpc.status_pb2.Status): + Error status. + + This field is a member of `oneof`_ ``response``. + query_result (google.cloud.asset_v1.types.QueryResult): + Result of the query. + + This field is a member of `oneof`_ ``response``. + output_config (google.cloud.asset_v1.types.QueryAssetsOutputConfig): + Output configuration, which indicates that + instead of being returned in the API response on + the fly, the query result will be saved in a + specific output. + + This field is a member of `oneof`_ ``response``. + """ + + job_reference: str = proto.Field( + proto.STRING, + number=1, + ) + done: bool = proto.Field( + proto.BOOL, + number=2, + ) + error: status_pb2.Status = proto.Field( + proto.MESSAGE, + number=3, + oneof='response', + message=status_pb2.Status, + ) + query_result: 'QueryResult' = proto.Field( + proto.MESSAGE, + number=4, + oneof='response', + message='QueryResult', + ) + output_config: 'QueryAssetsOutputConfig' = proto.Field( + proto.MESSAGE, + number=5, + oneof='response', + message='QueryAssetsOutputConfig', + ) + + +class QueryResult(proto.Message): + r"""Execution results of the query. + + The result is formatted as rows represented by a BigQuery-compatible + [schema]. When pagination is necessary, it will contain the page + token to retrieve the results of the following pages. + + Attributes: + rows (MutableSequence[google.protobuf.struct_pb2.Struct]): + Each row holds a query result in the format of ``Struct``. + schema (google.cloud.asset_v1.types.TableSchema): + Describes the format of the [rows].
+ next_page_token (str): + Token to retrieve the next page of the + results. + total_rows (int): + Total rows of the whole query results. + """ + + @property + def raw_page(self): + return self + + rows: MutableSequence[struct_pb2.Struct] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=struct_pb2.Struct, + ) + schema: 'TableSchema' = proto.Field( + proto.MESSAGE, + number=2, + message='TableSchema', + ) + next_page_token: str = proto.Field( + proto.STRING, + number=3, + ) + total_rows: int = proto.Field( + proto.INT64, + number=4, + ) + + +class TableSchema(proto.Message): + r"""BigQuery Compatible table schema. + + Attributes: + fields (MutableSequence[google.cloud.asset_v1.types.TableFieldSchema]): + Describes the fields in a table. + """ + + fields: MutableSequence['TableFieldSchema'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='TableFieldSchema', + ) + + +class TableFieldSchema(proto.Message): + r"""A field in TableSchema. + + Attributes: + field (str): + The field name. The name must contain only letters (a-z, + A-Z), numbers (0-9), or underscores (_), and must start with + a letter or underscore. The maximum length is 128 + characters. + type_ (str): + The field data type. Possible values include + + - STRING + - BYTES + - INTEGER + - FLOAT + - BOOLEAN + - TIMESTAMP + - DATE + - TIME + - DATETIME + - GEOGRAPHY, + - NUMERIC, + - BIGNUMERIC, + - RECORD (where RECORD indicates that the field contains a + nested schema). + mode (str): + The field mode. Possible values include + NULLABLE, REQUIRED and REPEATED. The default + value is NULLABLE. + fields (MutableSequence[google.cloud.asset_v1.types.TableFieldSchema]): + Describes the nested schema fields if the + type property is set to RECORD. + """ + + field: str = proto.Field( + proto.STRING, + number=1, + ) + type_: str = proto.Field( + proto.STRING, + number=2, + ) + mode: str = proto.Field( + proto.STRING, + number=3, + ) + fields: MutableSequence['TableFieldSchema'] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message='TableFieldSchema', + ) + + +class BatchGetEffectiveIamPoliciesRequest(proto.Message): + r"""A request message for + [AssetService.BatchGetEffectiveIamPolicies][google.cloud.asset.v1.AssetService.BatchGetEffectiveIamPolicies]. + + Attributes: + scope (str): + Required. Only IAM policies on or below the scope will be + returned. + + This can only be an organization number (such as + "organizations/123"), a folder number (such as + "folders/123"), a project ID (such as + "projects/my-project-id"), or a project number (such as + "projects/12345"). + + To know how to get organization id, visit + `here `__. + + To know how to get folder or project id, visit + `here `__. + names (MutableSequence[str]): + Required. The names refer to the [full_resource_names] + (https://cloud.google.com/asset-inventory/docs/resource-name-format) + of `searchable asset + types `__. + A maximum of 20 resources' effective policies can be + retrieved in a batch. + """ + + scope: str = proto.Field( + proto.STRING, + number=1, + ) + names: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class BatchGetEffectiveIamPoliciesResponse(proto.Message): + r"""A response message for + [AssetService.BatchGetEffectiveIamPolicies][google.cloud.asset.v1.AssetService.BatchGetEffectiveIamPolicies]. + + Attributes: + policy_results (MutableSequence[google.cloud.asset_v1.types.BatchGetEffectiveIamPoliciesResponse.EffectiveIamPolicy]): + The effective policies for a batch of resources. 
Note that + the results order is the same as the order of + [BatchGetEffectiveIamPoliciesRequest.names][google.cloud.asset.v1.BatchGetEffectiveIamPoliciesRequest.names]. + When a resource does not have any effective IAM policies, + its corresponding policy_result will contain empty + [EffectiveIamPolicy.policies][google.cloud.asset.v1.BatchGetEffectiveIamPoliciesResponse.EffectiveIamPolicy.policies]. + """ + + class EffectiveIamPolicy(proto.Message): + r"""The effective IAM policies on one resource. + + Attributes: + full_resource_name (str): + The [full_resource_name] + (https://cloud.google.com/asset-inventory/docs/resource-name-format) + for which the + [policies][google.cloud.asset.v1.BatchGetEffectiveIamPoliciesResponse.EffectiveIamPolicy.policies] + are computed. This is one of the + [BatchGetEffectiveIamPoliciesRequest.names][google.cloud.asset.v1.BatchGetEffectiveIamPoliciesRequest.names] + the caller provides in the request. + policies (MutableSequence[google.cloud.asset_v1.types.BatchGetEffectiveIamPoliciesResponse.EffectiveIamPolicy.PolicyInfo]): + The effective policies for the + [full_resource_name][google.cloud.asset.v1.BatchGetEffectiveIamPoliciesResponse.EffectiveIamPolicy.full_resource_name]. + + These policies include the policy set on the + [full_resource_name][google.cloud.asset.v1.BatchGetEffectiveIamPoliciesResponse.EffectiveIamPolicy.full_resource_name] + and those set on its parents and ancestors up to the + [BatchGetEffectiveIamPoliciesRequest.scope][google.cloud.asset.v1.BatchGetEffectiveIamPoliciesRequest.scope]. + Note that these policies are not filtered according to the + resource type of the + [full_resource_name][google.cloud.asset.v1.BatchGetEffectiveIamPoliciesResponse.EffectiveIamPolicy.full_resource_name]. + + These policies are hierarchically ordered by + [PolicyInfo.attached_resource][google.cloud.asset.v1.BatchGetEffectiveIamPoliciesResponse.EffectiveIamPolicy.PolicyInfo.attached_resource] + starting from + [full_resource_name][google.cloud.asset.v1.BatchGetEffectiveIamPoliciesResponse.EffectiveIamPolicy.full_resource_name] + itself to its parents and ancestors, such that policies[i]'s + [PolicyInfo.attached_resource][google.cloud.asset.v1.BatchGetEffectiveIamPoliciesResponse.EffectiveIamPolicy.PolicyInfo.attached_resource] + is the child of policies[i+1]'s + [PolicyInfo.attached_resource][google.cloud.asset.v1.BatchGetEffectiveIamPoliciesResponse.EffectiveIamPolicy.PolicyInfo.attached_resource], + if policies[i+1] exists. + """ + + class PolicyInfo(proto.Message): + r"""The IAM policy and its attached resource. + + Attributes: + attached_resource (str): + The full resource name the + [policy][google.cloud.asset.v1.BatchGetEffectiveIamPoliciesResponse.EffectiveIamPolicy.PolicyInfo.policy] + is directly attached to. + policy (google.iam.v1.policy_pb2.Policy): + The IAM policy that's directly attached to the + [attached_resource][google.cloud.asset.v1.BatchGetEffectiveIamPoliciesResponse.EffectiveIamPolicy.PolicyInfo.attached_resource]. 
+ """ + + attached_resource: str = proto.Field( + proto.STRING, + number=1, + ) + policy: policy_pb2.Policy = proto.Field( + proto.MESSAGE, + number=2, + message=policy_pb2.Policy, + ) + + full_resource_name: str = proto.Field( + proto.STRING, + number=1, + ) + policies: MutableSequence['BatchGetEffectiveIamPoliciesResponse.EffectiveIamPolicy.PolicyInfo'] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message='BatchGetEffectiveIamPoliciesResponse.EffectiveIamPolicy.PolicyInfo', + ) + + policy_results: MutableSequence[EffectiveIamPolicy] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=EffectiveIamPolicy, + ) + + +class AnalyzerOrgPolicy(proto.Message): + r"""This organization policy message is a modified version of the + one defined in the Organization Policy system. This message + contains several fields defined in the original organization + policy with some new fields for analysis purpose. + + Attributes: + attached_resource (str): + The [full resource name] + (https://cloud.google.com/asset-inventory/docs/resource-name-format) + of an organization/folder/project resource where this + organization policy is set. + + Notice that some type of constraints are defined with + default policy. This field will be empty for them. + applied_resource (str): + The [full resource name] + (https://cloud.google.com/asset-inventory/docs/resource-name-format) + of an organization/folder/project resource where this + organization policy applies to. + + For any user defined org policies, this field has the same + value as the [attached_resource] field. Only for default + policy, this field has the different value. + rules (MutableSequence[google.cloud.asset_v1.types.AnalyzerOrgPolicy.Rule]): + List of rules for this organization policy. + inherit_from_parent (bool): + If ``inherit_from_parent`` is true, Rules set higher up in + the hierarchy (up to the closest root) are inherited and + present in the effective policy. If it is false, then no + rules are inherited, and this policy becomes the effective + root for evaluation. + reset (bool): + Ignores policies set above this resource and restores the + default behavior of the constraint at this resource. This + field can be set in policies for either list or boolean + constraints. If set, ``rules`` must be empty and + ``inherit_from_parent`` must be set to false. + """ + + class Rule(proto.Message): + r"""Represents a rule defined in an organization policy + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + values (google.cloud.asset_v1.types.AnalyzerOrgPolicy.Rule.StringValues): + List of values to be used for this + PolicyRule. This field can be set only in + Policies for list constraints. + + This field is a member of `oneof`_ ``kind``. + allow_all (bool): + Setting this to true means that all values + are allowed. This field can be set only in + Policies for list constraints. + + This field is a member of `oneof`_ ``kind``. + deny_all (bool): + Setting this to true means that all values + are denied. This field can be set only in + Policies for list constraints. + + This field is a member of `oneof`_ ``kind``. + enforce (bool): + If ``true``, then the ``Policy`` is enforced. If ``false``, + then any configuration is acceptable. 
This field can be set + only in Policies for boolean constraints. + + This field is a member of `oneof`_ ``kind``. + condition (google.type.expr_pb2.Expr): + The evaluating condition for this rule. + """ + + class StringValues(proto.Message): + r"""The string values for the list constraints. + + Attributes: + allowed_values (MutableSequence[str]): + List of values allowed at this resource. + denied_values (MutableSequence[str]): + List of values denied at this resource. + """ + + allowed_values: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + denied_values: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + + values: 'AnalyzerOrgPolicy.Rule.StringValues' = proto.Field( + proto.MESSAGE, + number=3, + oneof='kind', + message='AnalyzerOrgPolicy.Rule.StringValues', + ) + allow_all: bool = proto.Field( + proto.BOOL, + number=4, + oneof='kind', + ) + deny_all: bool = proto.Field( + proto.BOOL, + number=5, + oneof='kind', + ) + enforce: bool = proto.Field( + proto.BOOL, + number=6, + oneof='kind', + ) + condition: expr_pb2.Expr = proto.Field( + proto.MESSAGE, + number=7, + message=expr_pb2.Expr, + ) + + attached_resource: str = proto.Field( + proto.STRING, + number=1, + ) + applied_resource: str = proto.Field( + proto.STRING, + number=5, + ) + rules: MutableSequence[Rule] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=Rule, + ) + inherit_from_parent: bool = proto.Field( + proto.BOOL, + number=3, + ) + reset: bool = proto.Field( + proto.BOOL, + number=4, + ) + + +class AnalyzerOrgPolicyConstraint(proto.Message): + r"""The organization policy constraint definition. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + google_defined_constraint (google.cloud.asset_v1.types.AnalyzerOrgPolicyConstraint.Constraint): + The definition of the canned constraint + defined by Google. + + This field is a member of `oneof`_ ``constraint_definition``. + custom_constraint (google.cloud.asset_v1.types.AnalyzerOrgPolicyConstraint.CustomConstraint): + The definition of the custom constraint. + + This field is a member of `oneof`_ ``constraint_definition``. + """ + + class Constraint(proto.Message): + r"""The definition of a constraint. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + The unique name of the constraint. Format of the name should + be + + - ``constraints/{constraint_name}`` + + For example, + ``constraints/compute.disableSerialPortAccess``. + display_name (str): + The human readable name of the constraint. + description (str): + Detailed description of what this ``Constraint`` controls as + well as how and where it is enforced. + constraint_default (google.cloud.asset_v1.types.AnalyzerOrgPolicyConstraint.Constraint.ConstraintDefault): + The evaluation behavior of this constraint in + the absence of 'Policy'. 
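Because the ``kind`` members of ``AnalyzerOrgPolicy.Rule`` are mutually exclusive, consumers generally branch on whichever member is present. A sketch, relying on the proto-plus membership test (``"allow_all" in rule``) to detect which oneof member is set:

    from google.cloud import asset_v1

    def describe_rule(rule: asset_v1.AnalyzerOrgPolicy.Rule) -> str:
        # Exactly one member of the `kind` oneof is set at a time.
        if "allow_all" in rule:
            return "allows all values"
        if "deny_all" in rule:
            return "denies all values"
        if "enforce" in rule:
            return "enforced" if rule.enforce else "not enforced"
        if "values" in rule:
            return (f"allows {list(rule.values.allowed_values)}, "
                    f"denies {list(rule.values.denied_values)}")
        return "no kind set"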
+ list_constraint (google.cloud.asset_v1.types.AnalyzerOrgPolicyConstraint.Constraint.ListConstraint): + Defines this constraint as being a + ListConstraint. + + This field is a member of `oneof`_ ``constraint_type``. + boolean_constraint (google.cloud.asset_v1.types.AnalyzerOrgPolicyConstraint.Constraint.BooleanConstraint): + Defines this constraint as being a + BooleanConstraint. + + This field is a member of `oneof`_ ``constraint_type``. + """ + class ConstraintDefault(proto.Enum): + r"""Specifies the default behavior in the absence of any ``Policy`` for + the ``Constraint``. This must not be + ``CONSTRAINT_DEFAULT_UNSPECIFIED``. + + Values: + CONSTRAINT_DEFAULT_UNSPECIFIED (0): + This is only used for distinguishing unset + values and should never be used. + ALLOW (1): + Indicate that all values are allowed for list + constraints. Indicate that enforcement is off + for boolean constraints. + DENY (2): + Indicate that all values are denied for list + constraints. Indicate that enforcement is on for + boolean constraints. + """ + CONSTRAINT_DEFAULT_UNSPECIFIED = 0 + ALLOW = 1 + DENY = 2 + + class ListConstraint(proto.Message): + r"""A ``Constraint`` that allows or disallows a list of string values, + which are configured by an organization's policy administrator with + a ``Policy``. + + Attributes: + supports_in (bool): + Indicates whether values grouped into categories can be used + in ``Policy.allowed_values`` and ``Policy.denied_values``. + For example, ``"in:Python"`` would match any value in the + 'Python' group. + supports_under (bool): + Indicates whether subtrees of Cloud Resource Manager + resource hierarchy can be used in ``Policy.allowed_values`` + and ``Policy.denied_values``. For example, + ``"under:folders/123"`` would match any resource under the + 'folders/123' folder. + """ + + supports_in: bool = proto.Field( + proto.BOOL, + number=1, + ) + supports_under: bool = proto.Field( + proto.BOOL, + number=2, + ) + + class BooleanConstraint(proto.Message): + r"""A ``Constraint`` that is either enforced or not. + + For example a constraint + ``constraints/compute.disableSerialPortAccess``. If it is enforced + on a VM instance, serial port connections will not be opened to that + instance. + + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + description: str = proto.Field( + proto.STRING, + number=3, + ) + constraint_default: 'AnalyzerOrgPolicyConstraint.Constraint.ConstraintDefault' = proto.Field( + proto.ENUM, + number=4, + enum='AnalyzerOrgPolicyConstraint.Constraint.ConstraintDefault', + ) + list_constraint: 'AnalyzerOrgPolicyConstraint.Constraint.ListConstraint' = proto.Field( + proto.MESSAGE, + number=5, + oneof='constraint_type', + message='AnalyzerOrgPolicyConstraint.Constraint.ListConstraint', + ) + boolean_constraint: 'AnalyzerOrgPolicyConstraint.Constraint.BooleanConstraint' = proto.Field( + proto.MESSAGE, + number=6, + oneof='constraint_type', + message='AnalyzerOrgPolicyConstraint.Constraint.BooleanConstraint', + ) + + class CustomConstraint(proto.Message): + r"""The definition of a custom constraint. + + Attributes: + name (str): + Name of the constraint. This is unique within the + organization. Format of the name should be + + - ``organizations/{organization_id}/customConstraints/{custom_constraint_id}`` + + Example : + "organizations/123/customConstraints/custom.createOnlyE2TypeVms". 
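For a canned ``Constraint``, the ``constraint_type`` oneof distinguishes list constraints from boolean constraints, and ``constraint_default`` supplies the behavior when no policy is set. A hedged sketch of summarizing one:

    from google.cloud import asset_v1

    def summarize_constraint(c: asset_v1.AnalyzerOrgPolicyConstraint.Constraint) -> str:
        default = c.constraint_default.name  # e.g. "ALLOW" or "DENY"
        if "boolean_constraint" in c:
            return f"{c.name}: boolean constraint, default {default}"
        if "list_constraint" in c:
            return (f"{c.name}: list constraint, default {default}, "
                    f"supports in={c.list_constraint.supports_in}, "
                    f"under={c.list_constraint.supports_under}")
        return f"{c.name}: unknown constraint type"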
+            resource_types (MutableSequence[str]):
+                The resource instance type to which this policy applies.
+                Format will be of the form: "/" Example:
+
+                -  ``compute.googleapis.com/Instance``.
+            method_types (MutableSequence[google.cloud.asset_v1.types.AnalyzerOrgPolicyConstraint.CustomConstraint.MethodType]):
+                All the operations being applied for this
+                constraint.
+            condition (str):
+                Organization Policy condition/expression. For example:
+                ``resource.instanceName.matches("[production|test]_.*_(\d)+")``
+                or ``resource.management.auto_upgrade == true``
+            action_type (google.cloud.asset_v1.types.AnalyzerOrgPolicyConstraint.CustomConstraint.ActionType):
+                Allow or deny type.
+            display_name (str):
+                One-line display name for the UI.
+            description (str):
+                Detailed information about this custom policy
+                constraint.
+        """
+        class MethodType(proto.Enum):
+            r"""The operation in which this constraint is applied. For example:
+            if the constraint applies only when creating VMs, the method_types
+            will be "CREATE" only. If the constraint applies when creating or
+            deleting VMs, the method_types will be "CREATE" and "DELETE".
+
+            Values:
+                METHOD_TYPE_UNSPECIFIED (0):
+                    Unspecified. Will result in a user error.
+                CREATE (1):
+                    Constraint applied when creating the
+                    resource.
+                UPDATE (2):
+                    Constraint applied when updating the
+                    resource.
+                DELETE (3):
+                    Constraint applied when deleting the
+                    resource.
+            """
+            METHOD_TYPE_UNSPECIFIED = 0
+            CREATE = 1
+            UPDATE = 2
+            DELETE = 3
+
+        class ActionType(proto.Enum):
+            r"""Allow or deny type.
+
+            Values:
+                ACTION_TYPE_UNSPECIFIED (0):
+                    Unspecified. Will result in a user error.
+                ALLOW (1):
+                    Allowed action type.
+                DENY (2):
+                    Deny action type.
+            """
+            ACTION_TYPE_UNSPECIFIED = 0
+            ALLOW = 1
+            DENY = 2
+
+        name: str = proto.Field(
+            proto.STRING,
+            number=1,
+        )
+        resource_types: MutableSequence[str] = proto.RepeatedField(
+            proto.STRING,
+            number=2,
+        )
+        method_types: MutableSequence['AnalyzerOrgPolicyConstraint.CustomConstraint.MethodType'] = proto.RepeatedField(
+            proto.ENUM,
+            number=3,
+            enum='AnalyzerOrgPolicyConstraint.CustomConstraint.MethodType',
+        )
+        condition: str = proto.Field(
+            proto.STRING,
+            number=4,
+        )
+        action_type: 'AnalyzerOrgPolicyConstraint.CustomConstraint.ActionType' = proto.Field(
+            proto.ENUM,
+            number=5,
+            enum='AnalyzerOrgPolicyConstraint.CustomConstraint.ActionType',
+        )
+        display_name: str = proto.Field(
+            proto.STRING,
+            number=6,
+        )
+        description: str = proto.Field(
+            proto.STRING,
+            number=7,
+        )
+
+    google_defined_constraint: Constraint = proto.Field(
+        proto.MESSAGE,
+        number=1,
+        oneof='constraint_definition',
+        message=Constraint,
+    )
+    custom_constraint: CustomConstraint = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        oneof='constraint_definition',
+        message=CustomConstraint,
+    )
+
+
+class AnalyzeOrgPoliciesRequest(proto.Message):
+    r"""A request message for
+    [AssetService.AnalyzeOrgPolicies][google.cloud.asset.v1.AssetService.AnalyzeOrgPolicies].
+
+
+    .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
+
+    Attributes:
+        scope (str):
+            Required. The organization to scope the request. Only
+            organization policies within the scope will be analyzed.
+
+            -  organizations/{ORGANIZATION_NUMBER} (e.g.,
+               "organizations/123456")
+        constraint (str):
+            Required. The name of the constraint to
+            analyze organization policies for. The response
+            only contains analyzed organization policies for
+            the provided constraint.
+        filter (str):
+            The expression to filter
+            [AnalyzeOrgPoliciesResponse.org_policy_results][google.cloud.asset.v1.AnalyzeOrgPoliciesResponse.org_policy_results].
+            The only supported field is
+            ``consolidated_policy.attached_resource``, and the only
+            supported operator is ``=``.
+
+            Example:
+            consolidated_policy.attached_resource="//cloudresourcemanager.googleapis.com/folders/001"
+            will return the org policy results of "folders/001".
+        page_size (int):
+            The maximum number of items to return per page. If
+            unspecified,
+            [AnalyzeOrgPoliciesResponse.org_policy_results][google.cloud.asset.v1.AnalyzeOrgPoliciesResponse.org_policy_results]
+            will contain 20 items, with a maximum of 200.
+
+            This field is a member of `oneof`_ ``_page_size``.
+        page_token (str):
+            The pagination token to retrieve the next
+            page.
+    """
+
+    scope: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    constraint: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+    filter: str = proto.Field(
+        proto.STRING,
+        number=3,
+    )
+    page_size: int = proto.Field(
+        proto.INT32,
+        number=4,
+        optional=True,
+    )
+    page_token: str = proto.Field(
+        proto.STRING,
+        number=5,
+    )
+
+
+class AnalyzeOrgPoliciesResponse(proto.Message):
+    r"""The response message for
+    [AssetService.AnalyzeOrgPolicies][google.cloud.asset.v1.AssetService.AnalyzeOrgPolicies].
+
+    Attributes:
+        org_policy_results (MutableSequence[google.cloud.asset_v1.types.AnalyzeOrgPoliciesResponse.OrgPolicyResult]):
+            The organization policies under the
+            [AnalyzeOrgPoliciesRequest.scope][google.cloud.asset.v1.AnalyzeOrgPoliciesRequest.scope]
+            with the
+            [AnalyzeOrgPoliciesRequest.constraint][google.cloud.asset.v1.AnalyzeOrgPoliciesRequest.constraint].
+        constraint (google.cloud.asset_v1.types.AnalyzerOrgPolicyConstraint):
+            The definition of the constraint in the
+            request.
+        next_page_token (str):
+            The page token to fetch the next page for
+            [AnalyzeOrgPoliciesResponse.org_policy_results][google.cloud.asset.v1.AnalyzeOrgPoliciesResponse.org_policy_results].
+    """
+
+    class OrgPolicyResult(proto.Message):
+        r"""The organization policy result for the query.
+
+        Attributes:
+            consolidated_policy (google.cloud.asset_v1.types.AnalyzerOrgPolicy):
+                The consolidated organization policy for the analyzed
+                resource. The consolidated organization policy is computed
+                by merging and evaluating
+                [AnalyzeOrgPoliciesResponse.policy_bundle][]. The evaluation
+                will respect the organization policy `hierarchy
+                rules `__.
+            policy_bundle (MutableSequence[google.cloud.asset_v1.types.AnalyzerOrgPolicy]):
+                The ordered list of all organization policies from the
+                [AnalyzeOrgPoliciesResponse.OrgPolicyResult.consolidated_policy.attached_resource][]
+                to the scope specified in the request.
+
+                If the constraint is defined with default policy, it will
+                also appear in the list.
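Tying the request and response together, a minimal sketch that pages through the analysis for one constraint (the scope and constraint values are placeholders):

    from google.cloud import asset_v1

    client = asset_v1.AssetServiceClient()
    pager = client.analyze_org_policies(
        scope="organizations/123456",  # placeholder organization
        constraint="constraints/compute.disableSerialPortAccess",
    )
    for result in pager:  # iterates OrgPolicyResult values across pages
        policy = result.consolidated_policy
        print(policy.attached_resource, "-", len(policy.rules), "rule(s)")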
+ """ + + consolidated_policy: 'AnalyzerOrgPolicy' = proto.Field( + proto.MESSAGE, + number=1, + message='AnalyzerOrgPolicy', + ) + policy_bundle: MutableSequence['AnalyzerOrgPolicy'] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message='AnalyzerOrgPolicy', + ) + + @property + def raw_page(self): + return self + + org_policy_results: MutableSequence[OrgPolicyResult] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=OrgPolicyResult, + ) + constraint: 'AnalyzerOrgPolicyConstraint' = proto.Field( + proto.MESSAGE, + number=2, + message='AnalyzerOrgPolicyConstraint', + ) + next_page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class AnalyzeOrgPolicyGovernedContainersRequest(proto.Message): + r"""A request message for + [AssetService.AnalyzeOrgPolicyGovernedContainers][google.cloud.asset.v1.AssetService.AnalyzeOrgPolicyGovernedContainers]. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + scope (str): + Required. The organization to scope the request. Only + organization policies within the scope will be analyzed. The + output containers will also be limited to the ones governed + by those in-scope organization policies. + + - organizations/{ORGANIZATION_NUMBER} (e.g., + "organizations/123456") + constraint (str): + Required. The name of the constraint to + analyze governed containers for. The analysis + only contains organization policies for the + provided constraint. + filter (str): + The expression to filter the governed containers in result. + The only supported field is ``parent``, and the only + supported operator is ``=``. + + Example: + parent="//cloudresourcemanager.googleapis.com/folders/001" + will return all containers under "folders/001". + page_size (int): + The maximum number of items to return per page. If + unspecified, + [AnalyzeOrgPolicyGovernedContainersResponse.governed_containers][google.cloud.asset.v1.AnalyzeOrgPolicyGovernedContainersResponse.governed_containers] + will contain 100 items with a maximum of 200. + + This field is a member of `oneof`_ ``_page_size``. + page_token (str): + The pagination token to retrieve the next + page. + """ + + scope: str = proto.Field( + proto.STRING, + number=1, + ) + constraint: str = proto.Field( + proto.STRING, + number=2, + ) + filter: str = proto.Field( + proto.STRING, + number=3, + ) + page_size: int = proto.Field( + proto.INT32, + number=4, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=5, + ) + + +class AnalyzeOrgPolicyGovernedContainersResponse(proto.Message): + r"""The response message for + [AssetService.AnalyzeOrgPolicyGovernedContainers][google.cloud.asset.v1.AssetService.AnalyzeOrgPolicyGovernedContainers]. + + Attributes: + governed_containers (MutableSequence[google.cloud.asset_v1.types.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer]): + The list of the analyzed governed containers. + constraint (google.cloud.asset_v1.types.AnalyzerOrgPolicyConstraint): + The definition of the constraint in the + request. + next_page_token (str): + The page token to fetch the next page for + [AnalyzeOrgPolicyGovernedContainersResponse.governed_containers][google.cloud.asset.v1.AnalyzeOrgPolicyGovernedContainersResponse.governed_containers]. 
+ """ + + class GovernedContainer(proto.Message): + r"""The organization/folder/project resource governed by organization + policies of + [AnalyzeOrgPolicyGovernedContainersRequest.constraint][google.cloud.asset.v1.AnalyzeOrgPolicyGovernedContainersRequest.constraint]. + + Attributes: + full_resource_name (str): + The [full resource name] + (https://cloud.google.com/asset-inventory/docs/resource-name-format) + of an organization/folder/project resource. + parent (str): + The [full resource name] + (https://cloud.google.com/asset-inventory/docs/resource-name-format) + of the parent of + [AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer.full_resource_name][google.cloud.asset.v1.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer.full_resource_name]. + consolidated_policy (google.cloud.asset_v1.types.AnalyzerOrgPolicy): + The consolidated organization policy for the analyzed + resource. The consolidated organization policy is computed + by merging and evaluating + [AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer.policy_bundle][google.cloud.asset.v1.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer.policy_bundle]. + The evaluation will respect the organization policy + `hierarchy + rules `__. + policy_bundle (MutableSequence[google.cloud.asset_v1.types.AnalyzerOrgPolicy]): + The ordered list of all organization policies from the + [AnalyzeOrgPoliciesResponse.OrgPolicyResult.consolidated_policy.attached_resource][]. + to the scope specified in the request. + + If the constraint is defined with default policy, it will + also appear in the list. + """ + + full_resource_name: str = proto.Field( + proto.STRING, + number=1, + ) + parent: str = proto.Field( + proto.STRING, + number=2, + ) + consolidated_policy: 'AnalyzerOrgPolicy' = proto.Field( + proto.MESSAGE, + number=3, + message='AnalyzerOrgPolicy', + ) + policy_bundle: MutableSequence['AnalyzerOrgPolicy'] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message='AnalyzerOrgPolicy', + ) + + @property + def raw_page(self): + return self + + governed_containers: MutableSequence[GovernedContainer] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=GovernedContainer, + ) + constraint: 'AnalyzerOrgPolicyConstraint' = proto.Field( + proto.MESSAGE, + number=2, + message='AnalyzerOrgPolicyConstraint', + ) + next_page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class AnalyzeOrgPolicyGovernedAssetsRequest(proto.Message): + r"""A request message for + [AssetService.AnalyzeOrgPolicyGovernedAssets][google.cloud.asset.v1.AssetService.AnalyzeOrgPolicyGovernedAssets]. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + scope (str): + Required. The organization to scope the request. Only + organization policies within the scope will be analyzed. The + output assets will also be limited to the ones governed by + those in-scope organization policies. + + - organizations/{ORGANIZATION_NUMBER} (e.g., + "organizations/123456") + constraint (str): + Required. The name of the constraint to + analyze governed assets for. The analysis only + contains analyzed organization policies for the + provided constraint. + filter (str): + The expression to filter the governed assets in result. The + only supported fields for governed resources are + ``governed_resource.project`` and + ``governed_resource.folders``. 
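The governed-containers request/response pair defined above follows the same pager pattern; a sketch with placeholder scope and constraint values:

    from google.cloud import asset_v1

    client = asset_v1.AssetServiceClient()
    containers = client.analyze_org_policy_governed_containers(
        scope="organizations/123456",  # placeholder organization
        constraint="constraints/compute.requireOsLogin",  # placeholder constraint
    )
    for container in containers:
        print(container.full_resource_name, "parent:", container.parent)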
The only supported fields for
+            governed IAM policies are ``governed_iam_policy.project``
+            and ``governed_iam_policy.folders``. The only supported
+            operator is ``=``.
+
+            Example 1: governed_resource.project="projects/12345678"
+            filter will return all governed resources under
+            projects/12345678 including the project itself, if
+            applicable.
+
+            Example 2: governed_iam_policy.folders="folders/12345678"
+            filter will return all governed IAM policies under
+            folders/12345678, if applicable.
+        page_size (int):
+            The maximum number of items to return per page. If
+            unspecified,
+            [AnalyzeOrgPolicyGovernedAssetsResponse.governed_assets][google.cloud.asset.v1.AnalyzeOrgPolicyGovernedAssetsResponse.governed_assets]
+            will contain 100 items, with a maximum of 200.
+
+            This field is a member of `oneof`_ ``_page_size``.
+        page_token (str):
+            The pagination token to retrieve the next
+            page.
+    """
+
+    scope: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    constraint: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+    filter: str = proto.Field(
+        proto.STRING,
+        number=3,
+    )
+    page_size: int = proto.Field(
+        proto.INT32,
+        number=4,
+        optional=True,
+    )
+    page_token: str = proto.Field(
+        proto.STRING,
+        number=5,
+    )
+
+
+class AnalyzeOrgPolicyGovernedAssetsResponse(proto.Message):
+    r"""The response message for
+    [AssetService.AnalyzeOrgPolicyGovernedAssets][google.cloud.asset.v1.AssetService.AnalyzeOrgPolicyGovernedAssets].
+
+    Attributes:
+        governed_assets (MutableSequence[google.cloud.asset_v1.types.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset]):
+            The list of the analyzed governed assets.
+        constraint (google.cloud.asset_v1.types.AnalyzerOrgPolicyConstraint):
+            The definition of the constraint in the
+            request.
+        next_page_token (str):
+            The page token to fetch the next page for
+            [AnalyzeOrgPolicyGovernedAssetsResponse.governed_assets][google.cloud.asset.v1.AnalyzeOrgPolicyGovernedAssetsResponse.governed_assets].
+    """
+
+    class GovernedResource(proto.Message):
+        r"""The Google Cloud resources governed by the organization policies of
+        the
+        [AnalyzeOrgPolicyGovernedAssetsRequest.constraint][google.cloud.asset.v1.AnalyzeOrgPolicyGovernedAssetsRequest.constraint].
+
+        Attributes:
+            full_resource_name (str):
+                The [full resource name]
+                (https://cloud.google.com/asset-inventory/docs/resource-name-format)
+                of the Google Cloud resource.
+            parent (str):
+                The [full resource name]
+                (https://cloud.google.com/asset-inventory/docs/resource-name-format)
+                of the parent of
+                [AnalyzeOrgPolicyGovernedAssetsResponse.GovernedResource.full_resource_name][google.cloud.asset.v1.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedResource.full_resource_name].
+            project (str):
+                The project that this resource belongs to, in the format of
+                projects/{PROJECT_NUMBER}. This field is available when the
+                resource belongs to a project.
+            folders (MutableSequence[str]):
+                The folder(s) that this resource belongs to, in the format
+                of folders/{FOLDER_NUMBER}. This field is available when the
+                resource belongs (directly or cascadingly) to one or more
+                folders.
+            organization (str):
+                The organization that this resource belongs to, in the
+                format of organizations/{ORGANIZATION_NUMBER}. This field is
+                available when the resource belongs (directly or
+                cascadingly) to an organization.
+ """ + + full_resource_name: str = proto.Field( + proto.STRING, + number=1, + ) + parent: str = proto.Field( + proto.STRING, + number=2, + ) + project: str = proto.Field( + proto.STRING, + number=5, + ) + folders: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=6, + ) + organization: str = proto.Field( + proto.STRING, + number=7, + ) + + class GovernedIamPolicy(proto.Message): + r"""The IAM policies governed by the organization policies of the + [AnalyzeOrgPolicyGovernedAssetsRequest.constraint][google.cloud.asset.v1.AnalyzeOrgPolicyGovernedAssetsRequest.constraint]. + + Attributes: + attached_resource (str): + The full resource name of the resource associated with this + IAM policy. Example: + ``//compute.googleapis.com/projects/my_project_123/zones/zone1/instances/instance1``. + See `Cloud Asset Inventory Resource Name + Format `__ + for more information. + policy (google.iam.v1.policy_pb2.Policy): + The IAM policy directly set on the given + resource. + project (str): + The project that this IAM policy belongs to, in the format + of projects/{PROJECT_NUMBER}. This field is available when + the IAM policy belongs to a project. + folders (MutableSequence[str]): + The folder(s) that this IAM policy belongs to, in the format + of folders/{FOLDER_NUMBER}. This field is available when the + IAM policy belongs (directly or cascadingly) to one or more + folders. + organization (str): + The organization that this IAM policy belongs to, in the + format of organizations/{ORGANIZATION_NUMBER}. This field is + available when the IAM policy belongs (directly or + cascadingly) to an organization. + """ + + attached_resource: str = proto.Field( + proto.STRING, + number=1, + ) + policy: policy_pb2.Policy = proto.Field( + proto.MESSAGE, + number=2, + message=policy_pb2.Policy, + ) + project: str = proto.Field( + proto.STRING, + number=5, + ) + folders: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=6, + ) + organization: str = proto.Field( + proto.STRING, + number=7, + ) + + class GovernedAsset(proto.Message): + r"""Represents a Google Cloud asset(resource or IAM policy) governed by + the organization policies of the + [AnalyzeOrgPolicyGovernedAssetsRequest.constraint][google.cloud.asset.v1.AnalyzeOrgPolicyGovernedAssetsRequest.constraint]. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + governed_resource (google.cloud.asset_v1.types.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedResource): + A Google Cloud resource governed by the organization + policies of the + [AnalyzeOrgPolicyGovernedAssetsRequest.constraint][google.cloud.asset.v1.AnalyzeOrgPolicyGovernedAssetsRequest.constraint]. + + This field is a member of `oneof`_ ``governed_asset``. + governed_iam_policy (google.cloud.asset_v1.types.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedIamPolicy): + An IAM policy governed by the organization policies of the + [AnalyzeOrgPolicyGovernedAssetsRequest.constraint][google.cloud.asset.v1.AnalyzeOrgPolicyGovernedAssetsRequest.constraint]. + + This field is a member of `oneof`_ ``governed_asset``. + consolidated_policy (google.cloud.asset_v1.types.AnalyzerOrgPolicy): + The consolidated policy for the analyzed asset. 
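Since each ``GovernedAsset`` carries either a resource or an IAM policy, callers typically branch on the ``governed_asset`` oneof. A sketch with placeholder scope and constraint:

    from google.cloud import asset_v1

    client = asset_v1.AssetServiceClient()
    assets = client.analyze_org_policy_governed_assets(
        scope="organizations/123456",  # placeholder organization
        constraint="constraints/iam.allowedPolicyMemberDomains",  # placeholder
    )
    for asset in assets:
        if "governed_resource" in asset:  # oneof membership check
            print("resource:", asset.governed_resource.full_resource_name)
        elif "governed_iam_policy" in asset:
            print("IAM policy on:", asset.governed_iam_policy.attached_resource)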
The + consolidated policy is computed by merging and evaluating + [AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset.policy_bundle][google.cloud.asset.v1.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset.policy_bundle]. + The evaluation will respect the organization policy + `hierarchy + rules `__. + policy_bundle (MutableSequence[google.cloud.asset_v1.types.AnalyzerOrgPolicy]): + The ordered list of all organization policies from the + [AnalyzeOrgPoliciesResponse.OrgPolicyResult.consolidated_policy.attached_resource][] + to the scope specified in the request. + + If the constraint is defined with default policy, it will + also appear in the list. + """ + + governed_resource: 'AnalyzeOrgPolicyGovernedAssetsResponse.GovernedResource' = proto.Field( + proto.MESSAGE, + number=1, + oneof='governed_asset', + message='AnalyzeOrgPolicyGovernedAssetsResponse.GovernedResource', + ) + governed_iam_policy: 'AnalyzeOrgPolicyGovernedAssetsResponse.GovernedIamPolicy' = proto.Field( + proto.MESSAGE, + number=2, + oneof='governed_asset', + message='AnalyzeOrgPolicyGovernedAssetsResponse.GovernedIamPolicy', + ) + consolidated_policy: 'AnalyzerOrgPolicy' = proto.Field( + proto.MESSAGE, + number=3, + message='AnalyzerOrgPolicy', + ) + policy_bundle: MutableSequence['AnalyzerOrgPolicy'] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message='AnalyzerOrgPolicy', + ) + + @property + def raw_page(self): + return self + + governed_assets: MutableSequence[GovernedAsset] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=GovernedAsset, + ) + constraint: 'AnalyzerOrgPolicyConstraint' = proto.Field( + proto.MESSAGE, + number=2, + message='AnalyzerOrgPolicyConstraint', + ) + next_page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/tests/integration/goldens/asset/google/cloud/asset_v1/types/assets.py b/tests/integration/goldens/asset/google/cloud/asset_v1/types/assets.py index fca415244b..41a578401e 100755 --- a/tests/integration/goldens/asset/google/cloud/asset_v1/types/assets.py +++ b/tests/integration/goldens/asset/google/cloud/asset_v1/types/assets.py @@ -37,7 +37,14 @@ 'TimeWindow', 'Asset', 'Resource', + 'RelatedAssets', + 'RelationshipAttributes', + 'RelatedAsset', 'ResourceSearchResult', + 'VersionedResource', + 'AttachedResource', + 'RelatedResources', + 'RelatedResource', 'IamPolicySearchResult', 'IamPolicyAnalysisState', 'ConditionEvaluation', @@ -143,8 +150,8 @@ class Asset(proto.Message): hierarchy `__, a resource outside the Google Cloud resource hierarchy (such as Google Kubernetes Engine clusters and objects), or a policy (e.g. - Cloud IAM policy), or a relationship (e.g. an - INSTANCE_TO_INSTANCEGROUP relationship). See `Supported asset + IAM policy), or a relationship (e.g. an INSTANCE_TO_INSTANCEGROUP + relationship). See `Supported asset types `__ for more information. @@ -176,16 +183,15 @@ class Asset(proto.Message): resource (google.cloud.asset_v1.types.Resource): A representation of the resource. iam_policy (google.iam.v1.policy_pb2.Policy): - A representation of the Cloud IAM policy set on a Google - Cloud resource. There can be a maximum of one Cloud IAM - policy set on any given resource. In addition, Cloud IAM - policies inherit their granted access scope from any - policies set on parent resources in the resource hierarchy. 
- Therefore, the effectively policy is the union of both the - policy set on this resource and each policy set on all of - the resource's ancestry resource levels in the hierarchy. - See `this - topic `__ + A representation of the IAM policy set on a Google Cloud + resource. There can be a maximum of one IAM policy set on + any given resource. In addition, IAM policies inherit their + granted access scope from any policies set on parent + resources in the resource hierarchy. Therefore, the + effectively policy is the union of both the policy set on + this resource and each policy set on all of the resource's + ancestry resource levels in the hierarchy. See `this + topic `__ for more information. org_policy (MutableSequence[google.cloud.orgpolicy.v1.orgpolicy_pb2.Policy]): A representation of an `organization @@ -212,6 +218,15 @@ class Asset(proto.Message): `this topic `__ for more information. + related_assets (google.cloud.asset_v1.types.RelatedAssets): + DEPRECATED. This field only presents for the + purpose of backward-compatibility. The server + will never generate responses with this field. + The related assets of the asset of one + relationship type. One asset only represents one + type of relationship. + related_asset (google.cloud.asset_v1.types.RelatedAsset): + One related asset of the current asset. ancestors (MutableSequence[str]): The ancestry path of an asset in Google Cloud `resource hierarchy `__, @@ -276,6 +291,16 @@ class Asset(proto.Message): number=12, message=inventory_pb2.Inventory, ) + related_assets: 'RelatedAssets' = proto.Field( + proto.MESSAGE, + number=13, + message='RelatedAssets', + ) + related_asset: 'RelatedAsset' = proto.Field( + proto.MESSAGE, + number=15, + message='RelatedAsset', + ) ancestors: MutableSequence[str] = proto.RepeatedField( proto.STRING, number=10, @@ -314,7 +339,7 @@ class Resource(proto.Message): for more information. For Google Cloud assets, this value is the parent resource - defined in the `Cloud IAM policy + defined in the `IAM policy hierarchy `__. Example: ``//cloudresourcemanager.googleapis.com/projects/my_project_123`` @@ -362,9 +387,131 @@ class Resource(proto.Message): ) +class RelatedAssets(proto.Message): + r"""DEPRECATED. This message only presents for the purpose of + backward-compatibility. The server will never populate this message + in responses. The detailed related assets with the + ``relationship_type``. + + Attributes: + relationship_attributes (google.cloud.asset_v1.types.RelationshipAttributes): + The detailed relationship attributes. + assets (MutableSequence[google.cloud.asset_v1.types.RelatedAsset]): + The peer resources of the relationship. + """ + + relationship_attributes: 'RelationshipAttributes' = proto.Field( + proto.MESSAGE, + number=1, + message='RelationshipAttributes', + ) + assets: MutableSequence['RelatedAsset'] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message='RelatedAsset', + ) + + +class RelationshipAttributes(proto.Message): + r"""DEPRECATED. This message only presents for the purpose of + backward-compatibility. The server will never populate this message + in responses. The relationship attributes which include ``type``, + ``source_resource_type``, ``target_resource_type`` and ``action``. + + Attributes: + type_ (str): + The unique identifier of the relationship type. Example: + ``INSTANCE_TO_INSTANCEGROUP`` + source_resource_type (str): + The source asset type. Example: + ``compute.googleapis.com/Instance`` + target_resource_type (str): + The target asset type. 
Example: + ``compute.googleapis.com/Disk`` + action (str): + The detail of the relationship, e.g. ``contains``, + ``attaches`` + """ + + type_: str = proto.Field( + proto.STRING, + number=4, + ) + source_resource_type: str = proto.Field( + proto.STRING, + number=1, + ) + target_resource_type: str = proto.Field( + proto.STRING, + number=2, + ) + action: str = proto.Field( + proto.STRING, + number=3, + ) + + +class RelatedAsset(proto.Message): + r"""An asset identifier in Google Cloud which contains its name, type + and ancestors. An asset can be any resource in the Google Cloud + `resource + hierarchy `__, + a resource outside the Google Cloud resource hierarchy (such as + Google Kubernetes Engine clusters and objects), or a policy (e.g. + IAM policy). See `Supported asset + types `__ + for more information. + + Attributes: + asset (str): + The full name of the asset. Example: + ``//compute.googleapis.com/projects/my_project_123/zones/zone1/instances/instance1`` + + See `Resource + names `__ + for more information. + asset_type (str): + The type of the asset. Example: + ``compute.googleapis.com/Disk`` + + See `Supported asset + types `__ + for more information. + ancestors (MutableSequence[str]): + The ancestors of an asset in Google Cloud `resource + hierarchy `__, + represented as a list of relative resource names. An + ancestry path starts with the closest ancestor in the + hierarchy and ends at root. + + Example: + ``["projects/123456789", "folders/5432", "organizations/1234"]`` + relationship_type (str): + The unique identifier of the relationship type. Example: + ``INSTANCE_TO_INSTANCEGROUP`` + """ + + asset: str = proto.Field( + proto.STRING, + number=1, + ) + asset_type: str = proto.Field( + proto.STRING, + number=2, + ) + ancestors: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + relationship_type: str = proto.Field( + proto.STRING, + number=4, + ) + + class ResourceSearchResult(proto.Message): r"""A result of Resource Search, containing information of a - cloud resource. + cloud resource. Next ID: 32 Attributes: name (str): @@ -376,15 +523,15 @@ class ResourceSearchResult(proto.Message): To search against the ``name``: - - use a field query. Example: ``name:instance1`` - - use a free text query. Example: ``instance1`` + - Use a field query. Example: ``name:instance1`` + - Use a free text query. Example: ``instance1`` asset_type (str): The type of this resource. Example: ``compute.googleapis.com/Disk``. To search against the ``asset_type``: - - specify the ``asset_type`` field in your search request. + - Specify the ``asset_type`` field in your search request. project (str): The project that this resource belongs to, in the form of projects/{PROJECT_NUMBER}. This field is available when the @@ -392,9 +539,9 @@ class ResourceSearchResult(proto.Message): To search against ``project``: - - use a field query. Example: ``project:12345`` - - use a free text query. Example: ``12345`` - - specify the ``scope`` field as this project in your + - Use a field query. Example: ``project:12345`` + - Use a free text query. Example: ``12345`` + - Specify the ``scope`` field as this project in your search request. folders (MutableSequence[str]): The folder(s) that this resource belongs to, in the form of @@ -403,9 +550,9 @@ class ResourceSearchResult(proto.Message): To search against ``folders``: - - use a field query. Example: ``folders:(123 OR 456)`` - - use a free text query. 
Example: ``123`` - - specify the ``scope`` field as this folder in your search + - Use a field query. Example: ``folders:(123 OR 456)`` + - Use a free text query. Example: ``123`` + - Specify the ``scope`` field as this folder in your search request. organization (str): The organization that this resource belongs to, in the form @@ -414,47 +561,47 @@ class ResourceSearchResult(proto.Message): To search against ``organization``: - - use a field query. Example: ``organization:123`` - - use a free text query. Example: ``123`` - - specify the ``scope`` field as this organization in your + - Use a field query. Example: ``organization:123`` + - Use a free text query. Example: ``123`` + - Specify the ``scope`` field as this organization in your search request. display_name (str): The display name of this resource. This field is available - only when the resource's proto contains it. + only when the resource's Protobuf contains it. To search against the ``display_name``: - - use a field query. Example: ``displayName:"My Instance"`` - - use a free text query. Example: ``"My Instance"`` + - Use a field query. Example: ``displayName:"My Instance"`` + - Use a free text query. Example: ``"My Instance"`` description (str): One or more paragraphs of text description of this resource. Maximum length could be up to 1M bytes. This field is - available only when the resource's proto contains it. + available only when the resource's Protobuf contains it. To search against the ``description``: - - use a field query. Example: + - Use a field query. Example: ``description:"important instance"`` - - use a free text query. Example: ``"important instance"`` + - Use a free text query. Example: ``"important instance"`` location (str): Location can be ``global``, regional like ``us-east1``, or zonal like ``us-west1-b``. This field is available only when - the resource's proto contains it. + the resource's Protobuf contains it. To search against the ``location``: - - use a field query. Example: ``location:us-west*`` - - use a free text query. Example: ``us-west*`` + - Use a field query. Example: ``location:us-west*`` + - Use a free text query. Example: ``us-west*`` labels (MutableMapping[str, str]): Labels associated with this resource. See `Labelling and - grouping GCP + grouping Google Cloud resources `__ for more information. This field is available only when the - resource's proto contains it. + resource's Protobuf contains it. To search against the ``labels``: - - use a field query: + - Use a field query: - query on any label's key or value. Example: ``labels:prod`` @@ -462,40 +609,59 @@ class ResourceSearchResult(proto.Message): - query by a given label's existence. Example: ``labels.env:*`` - - use a free text query. Example: ``prod`` + - Use a free text query. Example: ``prod`` network_tags (MutableSequence[str]): Network tags associated with this resource. Like labels, - network tags are a type of annotations used to group GCP - resources. See `Labelling GCP + network tags are a type of annotations used to group Google + Cloud resources. See `Labelling Google Cloud resources `__ for more information. This field is available only when the - resource's proto contains it. + resource's Protobuf contains it. To search against the ``network_tags``: - - use a field query. Example: ``networkTags:internal`` - - use a free text query. Example: ``internal`` + - Use a field query. Example: ``networkTags:internal`` + - Use a free text query. 
Example: ``internal`` kms_key (str): The Cloud KMS - `CryptoKey `__ + `CryptoKey `__ name or - `CryptoKeyVersion `__ - name. This field is available only when the resource's proto - contains it. + `CryptoKeyVersion `__ + name. + + This field only presents for the purpose of backward + compatibility. Please use the ``kms_keys`` field to retrieve + Cloud KMS key information. This field is available only when + the resource's Protobuf contains it and will only be + populated for `these resource + types `__ + for backward compatible purposes. To search against the ``kms_key``: - - use a field query. Example: ``kmsKey:key`` - - use a free text query. Example: ``key`` + - Use a field query. Example: ``kmsKey:key`` + - Use a free text query. Example: ``key`` + kms_keys (MutableSequence[str]): + The Cloud KMS + `CryptoKey `__ + names or + `CryptoKeyVersion `__ + names. This field is available only when the resource's + Protobuf contains it. + + To search against the ``kms_keys``: + + - Use a field query. Example: ``kmsKeys:key`` + - Use a free text query. Example: ``key`` create_time (google.protobuf.timestamp_pb2.Timestamp): The create timestamp of this resource, at which the resource was created. The granularity is in seconds. Timestamp.nanos will always be 0. This field is available only when the - resource's proto contains it. + resource's Protobuf contains it. To search against ``create_time``: - - use a field query. + - Use a field query. - value in seconds since unix epoch. Example: ``createTime > 1609459200`` @@ -507,11 +673,11 @@ class ResourceSearchResult(proto.Message): The last update timestamp of this resource, at which the resource was last modified or deleted. The granularity is in seconds. Timestamp.nanos will always be 0. This field is - available only when the resource's proto contains it. + available only when the resource's Protobuf contains it. To search against ``update_time``: - - use a field query. + - Use a field query. - value in seconds since unix epoch. Example: ``updateTime < 1609459200`` @@ -523,42 +689,44 @@ class ResourceSearchResult(proto.Message): The state of this resource. Different resources types have different state definitions that are mapped from various fields of different resource types. This field is available - only when the resource's proto contains it. + only when the resource's Protobuf contains it. Example: If the resource is an instance provided by Compute Engine, its state will include PROVISIONING, STAGING, RUNNING, STOPPING, SUSPENDING, SUSPENDED, REPAIRING, and TERMINATED. See ``status`` definition in `API Reference `__. - If the resource is a project provided by Cloud Resource - Manager, its state will include LIFECYCLE_STATE_UNSPECIFIED, - ACTIVE, DELETE_REQUESTED and DELETE_IN_PROGRESS. See + If the resource is a project provided by Resource Manager, + its state will include LIFECYCLE_STATE_UNSPECIFIED, ACTIVE, + DELETE_REQUESTED and DELETE_IN_PROGRESS. See ``lifecycleState`` definition in `API Reference `__. To search against the ``state``: - - use a field query. Example: ``state:RUNNING`` - - use a free text query. Example: ``RUNNING`` + - Use a field query. Example: ``state:RUNNING`` + - Use a free text query. Example: ``RUNNING`` additional_attributes (google.protobuf.struct_pb2.Struct): The additional searchable attributes of this resource. The attributes may vary from one resource type to another. Examples: ``projectId`` for Project, ``dnsName`` for DNS ManagedZone. 
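The search syntax documented above is what goes into the ``query`` of a ``SearchAllResourcesRequest``; a minimal sketch, where the scope, query, and asset type are illustrative values:

    from google.cloud import asset_v1

    client = asset_v1.AssetServiceClient()
    results = client.search_all_resources(
        scope="projects/my-project-id",  # placeholder scope
        query="state:RUNNING location:us-west*",  # illustrative field queries
        asset_types=["compute.googleapis.com/Instance"],
    )
    for resource in results:
        print(resource.display_name, resource.location, resource.state)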
This field contains a subset of the resource metadata fields that are returned by the List or Get APIs - provided by the corresponding GCP service (e.g., Compute - Engine). see `API references and supported searchable + provided by the corresponding Google Cloud service (e.g., + Compute Engine). see `API references and supported + searchable attributes `__ to see which fields are included. You can search values of these fields through free text search. However, you should not consume the field programically as the field names and values may change as - the GCP service updates to a new incompatible API version. + the Google Cloud service updates to a new incompatible API + version. To search against the ``additional_attributes``: - - use a free text query to match the attributes values. + - Use a free text query to match the attributes values. Example: to search ``additional_attributes = { dnsName: "foobar" }``, you can issue a query ``foobar``. @@ -566,18 +734,85 @@ class ResourceSearchResult(proto.Message): The full resource name of this resource's parent, if it has one. To search against the ``parent_full_resource_name``: - - use a field query. Example: + - Use a field query. Example: ``parentFullResourceName:"project-name"`` - - use a free text query. Example: ``project-name`` + - Use a free text query. Example: ``project-name`` + versioned_resources (MutableSequence[google.cloud.asset_v1.types.VersionedResource]): + Versioned resource representations of this resource. This is + repeated because there could be multiple versions of + resource representations during version migration. + + This ``versioned_resources`` field is not searchable. Some + attributes of the resource representations are exposed in + ``additional_attributes`` field, so as to allow users to + search on them. + attached_resources (MutableSequence[google.cloud.asset_v1.types.AttachedResource]): + Attached resources of this resource. For example, an + OSConfig Inventory is an attached resource of a Compute + Instance. This field is repeated because a resource could + have multiple attached resources. + + This ``attached_resources`` field is not searchable. Some + attributes of the attached resources are exposed in + ``additional_attributes`` field, so as to allow users to + search on them. + relationships (MutableMapping[str, google.cloud.asset_v1.types.RelatedResources]): + A map of related resources of this resource, keyed by the + relationship type. A relationship type is in the format of + {SourceType}*{ACTION}*\ {DestType}. Example: + ``DISK_TO_INSTANCE``, ``DISK_TO_NETWORK``, + ``INSTANCE_TO_INSTANCEGROUP``. See `supported relationship + types `__. + tag_keys (MutableSequence[str]): + TagKey namespaced names, in the format of + {ORG_ID}/{TAG_KEY_SHORT_NAME}. To search against the + ``tagKeys``: + + - Use a field query. Example: + + - ``tagKeys:"123456789/env*"`` + - ``tagKeys="123456789/env"`` + - ``tagKeys:"env"`` + + - Use a free text query. Example: + + - ``env`` + tag_values (MutableSequence[str]): + TagValue namespaced names, in the format of + {ORG_ID}/{TAG_KEY_SHORT_NAME}/{TAG_VALUE_SHORT_NAME}. To + search against the ``tagValues``: + + - Use a field query. Example: + + - ``tagValues:"env"`` + - ``tagValues:"env/prod"`` + - ``tagValues:"123456789/env/prod*"`` + - ``tagValues="123456789/env/prod"`` + + - Use a free text query. Example: + + - ``prod`` + tag_value_ids (MutableSequence[str]): + TagValue IDs, in the format of tagValues/{TAG_VALUE_ID}. 
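The tag fields are searchable the same way; a small sketch matching resources tagged ``env/prod``, assuming a placeholder scope:

    from google.cloud import asset_v1

    client = asset_v1.AssetServiceClient()
    for resource in client.search_all_resources(
        scope="organizations/123456",  # placeholder scope
        query='tagValues:"env/prod"',
    ):
        print(resource.name, list(resource.tag_values))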
To + search against the ``tagValueIds``: + + - Use a field query. Example: + + - ``tagValueIds:"456"`` + - ``tagValueIds="tagValues/456"`` + + - Use a free text query. Example: + + - ``456`` parent_asset_type (str): The type of this resource's immediate parent, if there is one. To search against the ``parent_asset_type``: - - use a field query. Example: + - Use a field query. Example: ``parentAssetType:"cloudresourcemanager.googleapis.com/Project"`` - - use a free text query. Example: + - Use a free text query. Example: ``cloudresourcemanager.googleapis.com/Project`` """ @@ -626,6 +861,10 @@ class ResourceSearchResult(proto.Message): proto.STRING, number=10, ) + kms_keys: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=28, + ) create_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=11, @@ -649,12 +888,148 @@ class ResourceSearchResult(proto.Message): proto.STRING, number=19, ) + versioned_resources: MutableSequence['VersionedResource'] = proto.RepeatedField( + proto.MESSAGE, + number=16, + message='VersionedResource', + ) + attached_resources: MutableSequence['AttachedResource'] = proto.RepeatedField( + proto.MESSAGE, + number=20, + message='AttachedResource', + ) + relationships: MutableMapping[str, 'RelatedResources'] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=21, + message='RelatedResources', + ) + tag_keys: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=23, + ) + tag_values: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=25, + ) + tag_value_ids: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=26, + ) parent_asset_type: str = proto.Field( proto.STRING, number=103, ) +class VersionedResource(proto.Message): + r"""Resource representation as defined by the corresponding + service providing the resource for a given API version. + + Attributes: + version (str): + API version of the resource. + + Example: If the resource is an instance provided by Compute + Engine v1 API as defined in + ``https://cloud.google.com/compute/docs/reference/rest/v1/instances``, + version will be "v1". + resource (google.protobuf.struct_pb2.Struct): + JSON representation of the resource as defined by the + corresponding service providing this resource. + + Example: If the resource is an instance provided by Compute + Engine, this field will contain the JSON representation of + the instance as defined by Compute Engine: + ``https://cloud.google.com/compute/docs/reference/rest/v1/instances``. + + You can find the resource definition for each supported + resource type in this table: + ``https://cloud.google.com/asset-inventory/docs/supported-asset-types#searchable_asset_types`` + """ + + version: str = proto.Field( + proto.STRING, + number=1, + ) + resource: struct_pb2.Struct = proto.Field( + proto.MESSAGE, + number=2, + message=struct_pb2.Struct, + ) + + +class AttachedResource(proto.Message): + r"""Attached resource representation, which is defined by the + corresponding service provider. It represents an attached + resource's payload. + + Attributes: + asset_type (str): + The type of this attached resource. + + Example: ``osconfig.googleapis.com/Inventory`` + + You can find the supported attached asset types of each + resource in this table: + ``https://cloud.google.com/asset-inventory/docs/supported-asset-types#searchable_asset_types`` + versioned_resources (MutableSequence[google.cloud.asset_v1.types.VersionedResource]): + Versioned resource representations of this + attached resource. 
This is repeated because + there could be multiple versions of the attached + resource representations during version + migration. + """ + + asset_type: str = proto.Field( + proto.STRING, + number=1, + ) + versioned_resources: MutableSequence['VersionedResource'] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message='VersionedResource', + ) + + +class RelatedResources(proto.Message): + r"""The related resources of the primary resource. + + Attributes: + related_resources (MutableSequence[google.cloud.asset_v1.types.RelatedResource]): + The detailed related resources of the primary + resource. + """ + + related_resources: MutableSequence['RelatedResource'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='RelatedResource', + ) + + +class RelatedResource(proto.Message): + r"""The detailed related resource. + + Attributes: + asset_type (str): + The type of the asset. Example: + ``compute.googleapis.com/Instance`` + full_resource_name (str): + The full resource name of the related resource. Example: + ``//compute.googleapis.com/projects/my_proj_123/zones/instance/instance123`` + """ + + asset_type: str = proto.Field( + proto.STRING, + number=1, + ) + full_resource_name: str = proto.Field( + proto.STRING, + number=2, + ) + + class IamPolicySearchResult(proto.Message): r"""A result of IAM Policy search, containing information of an IAM policy. @@ -680,12 +1055,12 @@ class IamPolicySearchResult(proto.Message): - specify the ``asset_types`` field in your search request. project (str): - The project that the associated GCP resource belongs to, in - the form of projects/{PROJECT_NUMBER}. If an IAM policy is - set on a resource (like VM instance, Cloud Storage bucket), - the project field will indicate the project that contains - the resource. If an IAM policy is set on a folder or - orgnization, this field will be empty. + The project that the associated Google Cloud resource + belongs to, in the form of projects/{PROJECT_NUMBER}. If an + IAM policy is set on a resource (like VM instance, Cloud + Storage bucket), the project field will indicate the project + that contains the resource. If an IAM policy is set on a + folder or orgnization, this field will be empty. To search against the ``project``: @@ -884,7 +1259,7 @@ class IamPolicyAnalysisResult(proto.Message): [iam_binding][google.cloud.asset.v1.IamPolicyAnalysisResult.iam_binding] policy attaches. iam_binding (google.iam.v1.policy_pb2.Binding): - The Cloud IAM policy binding under analysis. + The IAM policy binding under analysis. 
access_control_lists (MutableSequence[google.cloud.asset_v1.types.IamPolicyAnalysisResult.AccessControlList]): The access control lists derived from the [iam_binding][google.cloud.asset.v1.IamPolicyAnalysisResult.iam_binding] diff --git a/tests/integration/goldens/asset/noxfile.py b/tests/integration/goldens/asset/noxfile.py index e8c4449425..c333de24af 100755 --- a/tests/integration/goldens/asset/noxfile.py +++ b/tests/integration/goldens/asset/noxfile.py @@ -134,7 +134,7 @@ def docs(session): """Build the docs for this library.""" session.install("-e", ".") - session.install("sphinx==4.0.1", "alabaster", "recommonmark") + session.install("sphinx==7.0.1", "alabaster", "recommonmark") shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) session.run( diff --git a/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_move_async.py b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_move_async.py new file mode 100755 index 0000000000..377f2e12b2 --- /dev/null +++ b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_move_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AnalyzeMove +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-asset + + +# [START cloudasset_v1_generated_AssetService_AnalyzeMove_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import asset_v1 + + +async def sample_analyze_move(): + # Create a client + client = asset_v1.AssetServiceAsyncClient() + + # Initialize request argument(s) + request = asset_v1.AnalyzeMoveRequest( + resource="resource_value", + destination_parent="destination_parent_value", + ) + + # Make the request + response = await client.analyze_move(request=request) + + # Handle the response + print(response) + +# [END cloudasset_v1_generated_AssetService_AnalyzeMove_async] diff --git a/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_move_sync.py b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_move_sync.py new file mode 100755 index 0000000000..5ec453c16d --- /dev/null +++ b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_move_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AnalyzeMove +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-asset + + +# [START cloudasset_v1_generated_AssetService_AnalyzeMove_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import asset_v1 + + +def sample_analyze_move(): + # Create a client + client = asset_v1.AssetServiceClient() + + # Initialize request argument(s) + request = asset_v1.AnalyzeMoveRequest( + resource="resource_value", + destination_parent="destination_parent_value", + ) + + # Make the request + response = client.analyze_move(request=request) + + # Handle the response + print(response) + +# [END cloudasset_v1_generated_AssetService_AnalyzeMove_sync] diff --git a/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policies_async.py b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policies_async.py new file mode 100755 index 0000000000..625f3dec46 --- /dev/null +++ b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policies_async.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AnalyzeOrgPolicies +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-asset + + +# [START cloudasset_v1_generated_AssetService_AnalyzeOrgPolicies_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import asset_v1 + + +async def sample_analyze_org_policies(): + # Create a client + client = asset_v1.AssetServiceAsyncClient() + + # Initialize request argument(s) + request = asset_v1.AnalyzeOrgPoliciesRequest( + scope="scope_value", + constraint="constraint_value", + ) + + # Make the request + page_result = client.analyze_org_policies(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END cloudasset_v1_generated_AssetService_AnalyzeOrgPolicies_async] diff --git a/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policies_sync.py b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policies_sync.py new file mode 100755 index 0000000000..892f5a15b3 --- /dev/null +++ b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policies_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AnalyzeOrgPolicies +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-asset + + +# [START cloudasset_v1_generated_AssetService_AnalyzeOrgPolicies_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import asset_v1 + + +def sample_analyze_org_policies(): + # Create a client + client = asset_v1.AssetServiceClient() + + # Initialize request argument(s) + request = asset_v1.AnalyzeOrgPoliciesRequest( + scope="scope_value", + constraint="constraint_value", + ) + + # Make the request + page_result = client.analyze_org_policies(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END cloudasset_v1_generated_AssetService_AnalyzeOrgPolicies_sync] diff --git a/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policy_governed_assets_async.py b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policy_governed_assets_async.py new file mode 100755 index 0000000000..81b67efb9d --- /dev/null +++ b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policy_governed_assets_async.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AnalyzeOrgPolicyGovernedAssets +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-asset + + +# [START cloudasset_v1_generated_AssetService_AnalyzeOrgPolicyGovernedAssets_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import asset_v1 + + +async def sample_analyze_org_policy_governed_assets(): + # Create a client + client = asset_v1.AssetServiceAsyncClient() + + # Initialize request argument(s) + request = asset_v1.AnalyzeOrgPolicyGovernedAssetsRequest( + scope="scope_value", + constraint="constraint_value", + ) + + # Make the request + page_result = client.analyze_org_policy_governed_assets(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END cloudasset_v1_generated_AssetService_AnalyzeOrgPolicyGovernedAssets_async] diff --git a/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policy_governed_assets_sync.py b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policy_governed_assets_sync.py new file mode 100755 index 0000000000..86aca87da3 --- /dev/null +++ b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policy_governed_assets_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AnalyzeOrgPolicyGovernedAssets +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-asset + + +# [START cloudasset_v1_generated_AssetService_AnalyzeOrgPolicyGovernedAssets_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import asset_v1 + + +def sample_analyze_org_policy_governed_assets(): + # Create a client + client = asset_v1.AssetServiceClient() + + # Initialize request argument(s) + request = asset_v1.AnalyzeOrgPolicyGovernedAssetsRequest( + scope="scope_value", + constraint="constraint_value", + ) + + # Make the request + page_result = client.analyze_org_policy_governed_assets(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END cloudasset_v1_generated_AssetService_AnalyzeOrgPolicyGovernedAssets_sync] diff --git a/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policy_governed_containers_async.py b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policy_governed_containers_async.py new file mode 100755 index 0000000000..3a24500e39 --- /dev/null +++ b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policy_governed_containers_async.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AnalyzeOrgPolicyGovernedContainers +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-asset + + +# [START cloudasset_v1_generated_AssetService_AnalyzeOrgPolicyGovernedContainers_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import asset_v1 + + +async def sample_analyze_org_policy_governed_containers(): + # Create a client + client = asset_v1.AssetServiceAsyncClient() + + # Initialize request argument(s) + request = asset_v1.AnalyzeOrgPolicyGovernedContainersRequest( + scope="scope_value", + constraint="constraint_value", + ) + + # Make the request + page_result = client.analyze_org_policy_governed_containers(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END cloudasset_v1_generated_AssetService_AnalyzeOrgPolicyGovernedContainers_async] diff --git a/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policy_governed_containers_sync.py b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policy_governed_containers_sync.py new file mode 100755 index 0000000000..4aa243c92f --- /dev/null +++ b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policy_governed_containers_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AnalyzeOrgPolicyGovernedContainers +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-asset + + +# [START cloudasset_v1_generated_AssetService_AnalyzeOrgPolicyGovernedContainers_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import asset_v1 + + +def sample_analyze_org_policy_governed_containers(): + # Create a client + client = asset_v1.AssetServiceClient() + + # Initialize request argument(s) + request = asset_v1.AnalyzeOrgPolicyGovernedContainersRequest( + scope="scope_value", + constraint="constraint_value", + ) + + # Make the request + page_result = client.analyze_org_policy_governed_containers(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END cloudasset_v1_generated_AssetService_AnalyzeOrgPolicyGovernedContainers_sync] diff --git a/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_batch_get_effective_iam_policies_async.py b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_batch_get_effective_iam_policies_async.py new file mode 100755 index 0000000000..30ef0c4e53 --- /dev/null +++ b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_batch_get_effective_iam_policies_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for BatchGetEffectiveIamPolicies +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-asset + + +# [START cloudasset_v1_generated_AssetService_BatchGetEffectiveIamPolicies_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import asset_v1 + + +async def sample_batch_get_effective_iam_policies(): + # Create a client + client = asset_v1.AssetServiceAsyncClient() + + # Initialize request argument(s) + request = asset_v1.BatchGetEffectiveIamPoliciesRequest( + scope="scope_value", + names=['names_value1', 'names_value2'], + ) + + # Make the request + response = await client.batch_get_effective_iam_policies(request=request) + + # Handle the response + print(response) + +# [END cloudasset_v1_generated_AssetService_BatchGetEffectiveIamPolicies_async] diff --git a/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_batch_get_effective_iam_policies_sync.py b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_batch_get_effective_iam_policies_sync.py new file mode 100755 index 0000000000..f64e953a89 --- /dev/null +++ b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_batch_get_effective_iam_policies_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for BatchGetEffectiveIamPolicies +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-asset + + +# [START cloudasset_v1_generated_AssetService_BatchGetEffectiveIamPolicies_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import asset_v1 + + +def sample_batch_get_effective_iam_policies(): + # Create a client + client = asset_v1.AssetServiceClient() + + # Initialize request argument(s) + request = asset_v1.BatchGetEffectiveIamPoliciesRequest( + scope="scope_value", + names=['names_value1', 'names_value2'], + ) + + # Make the request + response = client.batch_get_effective_iam_policies(request=request) + + # Handle the response + print(response) + +# [END cloudasset_v1_generated_AssetService_BatchGetEffectiveIamPolicies_sync] diff --git a/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_create_saved_query_async.py b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_create_saved_query_async.py new file mode 100755 index 0000000000..b495af664e --- /dev/null +++ b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_create_saved_query_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateSavedQuery +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-asset + + +# [START cloudasset_v1_generated_AssetService_CreateSavedQuery_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import asset_v1 + + +async def sample_create_saved_query(): + # Create a client + client = asset_v1.AssetServiceAsyncClient() + + # Initialize request argument(s) + request = asset_v1.CreateSavedQueryRequest( + parent="parent_value", + saved_query_id="saved_query_id_value", + ) + + # Make the request + response = await client.create_saved_query(request=request) + + # Handle the response + print(response) + +# [END cloudasset_v1_generated_AssetService_CreateSavedQuery_async] diff --git a/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_create_saved_query_sync.py b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_create_saved_query_sync.py new file mode 100755 index 0000000000..bdebace2df --- /dev/null +++ b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_create_saved_query_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateSavedQuery +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-asset + + +# [START cloudasset_v1_generated_AssetService_CreateSavedQuery_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import asset_v1 + + +def sample_create_saved_query(): + # Create a client + client = asset_v1.AssetServiceClient() + + # Initialize request argument(s) + request = asset_v1.CreateSavedQueryRequest( + parent="parent_value", + saved_query_id="saved_query_id_value", + ) + + # Make the request + response = client.create_saved_query(request=request) + + # Handle the response + print(response) + +# [END cloudasset_v1_generated_AssetService_CreateSavedQuery_sync] diff --git a/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_delete_saved_query_async.py b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_delete_saved_query_async.py new file mode 100755 index 0000000000..40cdf7f6ff --- /dev/null +++ b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_delete_saved_query_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteSavedQuery +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-asset + + +# [START cloudasset_v1_generated_AssetService_DeleteSavedQuery_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import asset_v1 + + +async def sample_delete_saved_query(): + # Create a client + client = asset_v1.AssetServiceAsyncClient() + + # Initialize request argument(s) + request = asset_v1.DeleteSavedQueryRequest( + name="name_value", + ) + + # Make the request + await client.delete_saved_query(request=request) + + +# [END cloudasset_v1_generated_AssetService_DeleteSavedQuery_async] diff --git a/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_delete_saved_query_sync.py b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_delete_saved_query_sync.py new file mode 100755 index 0000000000..29d2ed16f1 --- /dev/null +++ b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_delete_saved_query_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteSavedQuery +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-asset + + +# [START cloudasset_v1_generated_AssetService_DeleteSavedQuery_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import asset_v1 + + +def sample_delete_saved_query(): + # Create a client + client = asset_v1.AssetServiceClient() + + # Initialize request argument(s) + request = asset_v1.DeleteSavedQueryRequest( + name="name_value", + ) + + # Make the request + client.delete_saved_query(request=request) + + +# [END cloudasset_v1_generated_AssetService_DeleteSavedQuery_sync] diff --git a/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_get_saved_query_async.py b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_get_saved_query_async.py new file mode 100755 index 0000000000..7f2185105f --- /dev/null +++ b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_get_saved_query_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetSavedQuery +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-asset + + +# [START cloudasset_v1_generated_AssetService_GetSavedQuery_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import asset_v1 + + +async def sample_get_saved_query(): + # Create a client + client = asset_v1.AssetServiceAsyncClient() + + # Initialize request argument(s) + request = asset_v1.GetSavedQueryRequest( + name="name_value", + ) + + # Make the request + response = await client.get_saved_query(request=request) + + # Handle the response + print(response) + +# [END cloudasset_v1_generated_AssetService_GetSavedQuery_async] diff --git a/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_get_saved_query_sync.py b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_get_saved_query_sync.py new file mode 100755 index 0000000000..947c761d88 --- /dev/null +++ b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_get_saved_query_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetSavedQuery +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-asset + + +# [START cloudasset_v1_generated_AssetService_GetSavedQuery_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import asset_v1 + + +def sample_get_saved_query(): + # Create a client + client = asset_v1.AssetServiceClient() + + # Initialize request argument(s) + request = asset_v1.GetSavedQueryRequest( + name="name_value", + ) + + # Make the request + response = client.get_saved_query(request=request) + + # Handle the response + print(response) + +# [END cloudasset_v1_generated_AssetService_GetSavedQuery_sync] diff --git a/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_saved_queries_async.py b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_saved_queries_async.py new file mode 100755 index 0000000000..c4be56a4f0 --- /dev/null +++ b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_saved_queries_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListSavedQueries +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-asset + + +# [START cloudasset_v1_generated_AssetService_ListSavedQueries_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import asset_v1 + + +async def sample_list_saved_queries(): + # Create a client + client = asset_v1.AssetServiceAsyncClient() + + # Initialize request argument(s) + request = asset_v1.ListSavedQueriesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_saved_queries(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END cloudasset_v1_generated_AssetService_ListSavedQueries_async] diff --git a/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_saved_queries_sync.py b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_saved_queries_sync.py new file mode 100755 index 0000000000..9825502a60 --- /dev/null +++ b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_saved_queries_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListSavedQueries +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-asset + + +# [START cloudasset_v1_generated_AssetService_ListSavedQueries_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import asset_v1 + + +def sample_list_saved_queries(): + # Create a client + client = asset_v1.AssetServiceClient() + + # Initialize request argument(s) + request = asset_v1.ListSavedQueriesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_saved_queries(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END cloudasset_v1_generated_AssetService_ListSavedQueries_sync] diff --git a/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_query_assets_async.py b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_query_assets_async.py new file mode 100755 index 0000000000..bd68125caf --- /dev/null +++ b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_query_assets_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for QueryAssets +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-asset + + +# [START cloudasset_v1_generated_AssetService_QueryAssets_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import asset_v1 + + +async def sample_query_assets(): + # Create a client + client = asset_v1.AssetServiceAsyncClient() + + # Initialize request argument(s) + request = asset_v1.QueryAssetsRequest( + statement="statement_value", + parent="parent_value", + ) + + # Make the request + response = await client.query_assets(request=request) + + # Handle the response + print(response) + +# [END cloudasset_v1_generated_AssetService_QueryAssets_async] diff --git a/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_query_assets_sync.py b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_query_assets_sync.py new file mode 100755 index 0000000000..e64414fdbc --- /dev/null +++ b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_query_assets_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for QueryAssets +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-asset + + +# [START cloudasset_v1_generated_AssetService_QueryAssets_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import asset_v1 + + +def sample_query_assets(): + # Create a client + client = asset_v1.AssetServiceClient() + + # Initialize request argument(s) + request = asset_v1.QueryAssetsRequest( + statement="statement_value", + parent="parent_value", + ) + + # Make the request + response = client.query_assets(request=request) + + # Handle the response + print(response) + +# [END cloudasset_v1_generated_AssetService_QueryAssets_sync] diff --git a/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_update_saved_query_async.py b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_update_saved_query_async.py new file mode 100755 index 0000000000..56aaab9fdc --- /dev/null +++ b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_update_saved_query_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateSavedQuery +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-asset + + +# [START cloudasset_v1_generated_AssetService_UpdateSavedQuery_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import asset_v1 + + +async def sample_update_saved_query(): + # Create a client + client = asset_v1.AssetServiceAsyncClient() + + # Initialize request argument(s) + request = asset_v1.UpdateSavedQueryRequest( + ) + + # Make the request + response = await client.update_saved_query(request=request) + + # Handle the response + print(response) + +# [END cloudasset_v1_generated_AssetService_UpdateSavedQuery_async] diff --git a/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_update_saved_query_sync.py b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_update_saved_query_sync.py new file mode 100755 index 0000000000..ef6a45d637 --- /dev/null +++ b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_update_saved_query_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateSavedQuery +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-asset + + +# [START cloudasset_v1_generated_AssetService_UpdateSavedQuery_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import asset_v1 + + +def sample_update_saved_query(): + # Create a client + client = asset_v1.AssetServiceClient() + + # Initialize request argument(s) + request = asset_v1.UpdateSavedQueryRequest( + ) + + # Make the request + response = client.update_saved_query(request=request) + + # Handle the response + print(response) + +# [END cloudasset_v1_generated_AssetService_UpdateSavedQuery_sync] diff --git a/tests/integration/goldens/asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1.json b/tests/integration/goldens/asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1.json index 5db3da5867..5a90dfa88b 100755 --- a/tests/integration/goldens/asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1.json +++ b/tests/integration/goldens/asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1.json @@ -317,6 +317,690 @@ ], "title": "cloudasset_v1_generated_asset_service_analyze_iam_policy_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient", + "shortName": "AssetServiceAsyncClient" + }, + "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient.analyze_move", + "method": { + "fullName": "google.cloud.asset.v1.AssetService.AnalyzeMove", + "service": { + "fullName": "google.cloud.asset.v1.AssetService", + "shortName": "AssetService" + }, + "shortName": "AnalyzeMove" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.asset_v1.types.AnalyzeMoveRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.asset_v1.types.AnalyzeMoveResponse", + "shortName": "analyze_move" + }, + "description": "Sample for AnalyzeMove", + "file": "cloudasset_v1_generated_asset_service_analyze_move_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudasset_v1_generated_AssetService_AnalyzeMove_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudasset_v1_generated_asset_service_analyze_move_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.asset_v1.AssetServiceClient", + "shortName": "AssetServiceClient" + }, + "fullName": "google.cloud.asset_v1.AssetServiceClient.analyze_move", + "method": { + "fullName": "google.cloud.asset.v1.AssetService.AnalyzeMove", + "service": { + "fullName": "google.cloud.asset.v1.AssetService", + "shortName": "AssetService" + }, + "shortName": "AnalyzeMove" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.asset_v1.types.AnalyzeMoveRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.cloud.asset_v1.types.AnalyzeMoveResponse", + "shortName": "analyze_move" + }, + "description": "Sample for AnalyzeMove", + "file": "cloudasset_v1_generated_asset_service_analyze_move_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudasset_v1_generated_AssetService_AnalyzeMove_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudasset_v1_generated_asset_service_analyze_move_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient", + "shortName": "AssetServiceAsyncClient" + }, + "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient.analyze_org_policies", + "method": { + "fullName": "google.cloud.asset.v1.AssetService.AnalyzeOrgPolicies", + "service": { + "fullName": "google.cloud.asset.v1.AssetService", + "shortName": "AssetService" + }, + "shortName": "AnalyzeOrgPolicies" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.asset_v1.types.AnalyzeOrgPoliciesRequest" + }, + { + "name": "scope", + "type": "str" + }, + { + "name": "constraint", + "type": "str" + }, + { + "name": "filter", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.asset_v1.services.asset_service.pagers.AnalyzeOrgPoliciesAsyncPager", + "shortName": "analyze_org_policies" + }, + "description": "Sample for AnalyzeOrgPolicies", + "file": "cloudasset_v1_generated_asset_service_analyze_org_policies_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudasset_v1_generated_AssetService_AnalyzeOrgPolicies_async", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudasset_v1_generated_asset_service_analyze_org_policies_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.asset_v1.AssetServiceClient", + "shortName": "AssetServiceClient" + }, + "fullName": "google.cloud.asset_v1.AssetServiceClient.analyze_org_policies", + "method": { + "fullName": "google.cloud.asset.v1.AssetService.AnalyzeOrgPolicies", + "service": { + "fullName": "google.cloud.asset.v1.AssetService", + "shortName": "AssetService" + }, + "shortName": "AnalyzeOrgPolicies" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.asset_v1.types.AnalyzeOrgPoliciesRequest" + }, + { + "name": "scope", + "type": "str" + }, + { + "name": "constraint", + "type": "str" + }, + { + "name": "filter", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + 
"resultType": "google.cloud.asset_v1.services.asset_service.pagers.AnalyzeOrgPoliciesPager", + "shortName": "analyze_org_policies" + }, + "description": "Sample for AnalyzeOrgPolicies", + "file": "cloudasset_v1_generated_asset_service_analyze_org_policies_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudasset_v1_generated_AssetService_AnalyzeOrgPolicies_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudasset_v1_generated_asset_service_analyze_org_policies_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient", + "shortName": "AssetServiceAsyncClient" + }, + "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient.analyze_org_policy_governed_assets", + "method": { + "fullName": "google.cloud.asset.v1.AssetService.AnalyzeOrgPolicyGovernedAssets", + "service": { + "fullName": "google.cloud.asset.v1.AssetService", + "shortName": "AssetService" + }, + "shortName": "AnalyzeOrgPolicyGovernedAssets" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.asset_v1.types.AnalyzeOrgPolicyGovernedAssetsRequest" + }, + { + "name": "scope", + "type": "str" + }, + { + "name": "constraint", + "type": "str" + }, + { + "name": "filter", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.asset_v1.services.asset_service.pagers.AnalyzeOrgPolicyGovernedAssetsAsyncPager", + "shortName": "analyze_org_policy_governed_assets" + }, + "description": "Sample for AnalyzeOrgPolicyGovernedAssets", + "file": "cloudasset_v1_generated_asset_service_analyze_org_policy_governed_assets_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudasset_v1_generated_AssetService_AnalyzeOrgPolicyGovernedAssets_async", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudasset_v1_generated_asset_service_analyze_org_policy_governed_assets_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.asset_v1.AssetServiceClient", + "shortName": "AssetServiceClient" + }, + "fullName": "google.cloud.asset_v1.AssetServiceClient.analyze_org_policy_governed_assets", + "method": { + "fullName": "google.cloud.asset.v1.AssetService.AnalyzeOrgPolicyGovernedAssets", + "service": { + "fullName": "google.cloud.asset.v1.AssetService", + "shortName": "AssetService" + }, + "shortName": "AnalyzeOrgPolicyGovernedAssets" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.asset_v1.types.AnalyzeOrgPolicyGovernedAssetsRequest" + }, + { + "name": "scope", + "type": "str" + }, + { + "name": 
"constraint", + "type": "str" + }, + { + "name": "filter", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.asset_v1.services.asset_service.pagers.AnalyzeOrgPolicyGovernedAssetsPager", + "shortName": "analyze_org_policy_governed_assets" + }, + "description": "Sample for AnalyzeOrgPolicyGovernedAssets", + "file": "cloudasset_v1_generated_asset_service_analyze_org_policy_governed_assets_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudasset_v1_generated_AssetService_AnalyzeOrgPolicyGovernedAssets_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudasset_v1_generated_asset_service_analyze_org_policy_governed_assets_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient", + "shortName": "AssetServiceAsyncClient" + }, + "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient.analyze_org_policy_governed_containers", + "method": { + "fullName": "google.cloud.asset.v1.AssetService.AnalyzeOrgPolicyGovernedContainers", + "service": { + "fullName": "google.cloud.asset.v1.AssetService", + "shortName": "AssetService" + }, + "shortName": "AnalyzeOrgPolicyGovernedContainers" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.asset_v1.types.AnalyzeOrgPolicyGovernedContainersRequest" + }, + { + "name": "scope", + "type": "str" + }, + { + "name": "constraint", + "type": "str" + }, + { + "name": "filter", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.asset_v1.services.asset_service.pagers.AnalyzeOrgPolicyGovernedContainersAsyncPager", + "shortName": "analyze_org_policy_governed_containers" + }, + "description": "Sample for AnalyzeOrgPolicyGovernedContainers", + "file": "cloudasset_v1_generated_asset_service_analyze_org_policy_governed_containers_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudasset_v1_generated_AssetService_AnalyzeOrgPolicyGovernedContainers_async", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudasset_v1_generated_asset_service_analyze_org_policy_governed_containers_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.asset_v1.AssetServiceClient", + "shortName": "AssetServiceClient" + }, + "fullName": "google.cloud.asset_v1.AssetServiceClient.analyze_org_policy_governed_containers", + "method": { + "fullName": 
"google.cloud.asset.v1.AssetService.AnalyzeOrgPolicyGovernedContainers", + "service": { + "fullName": "google.cloud.asset.v1.AssetService", + "shortName": "AssetService" + }, + "shortName": "AnalyzeOrgPolicyGovernedContainers" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.asset_v1.types.AnalyzeOrgPolicyGovernedContainersRequest" + }, + { + "name": "scope", + "type": "str" + }, + { + "name": "constraint", + "type": "str" + }, + { + "name": "filter", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.asset_v1.services.asset_service.pagers.AnalyzeOrgPolicyGovernedContainersPager", + "shortName": "analyze_org_policy_governed_containers" + }, + "description": "Sample for AnalyzeOrgPolicyGovernedContainers", + "file": "cloudasset_v1_generated_asset_service_analyze_org_policy_governed_containers_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudasset_v1_generated_AssetService_AnalyzeOrgPolicyGovernedContainers_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudasset_v1_generated_asset_service_analyze_org_policy_governed_containers_sync.py" + }, { "canonical": true, "clientMethod": { @@ -327,17 +1011,971 @@ }, "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient.batch_get_assets_history", "method": { - "fullName": "google.cloud.asset.v1.AssetService.BatchGetAssetsHistory", + "fullName": "google.cloud.asset.v1.AssetService.BatchGetAssetsHistory", + "service": { + "fullName": "google.cloud.asset.v1.AssetService", + "shortName": "AssetService" + }, + "shortName": "BatchGetAssetsHistory" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.asset_v1.types.BatchGetAssetsHistoryRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.asset_v1.types.BatchGetAssetsHistoryResponse", + "shortName": "batch_get_assets_history" + }, + "description": "Sample for BatchGetAssetsHistory", + "file": "cloudasset_v1_generated_asset_service_batch_get_assets_history_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudasset_v1_generated_AssetService_BatchGetAssetsHistory_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudasset_v1_generated_asset_service_batch_get_assets_history_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.asset_v1.AssetServiceClient", + "shortName": "AssetServiceClient" + }, + "fullName": 
"google.cloud.asset_v1.AssetServiceClient.batch_get_assets_history", + "method": { + "fullName": "google.cloud.asset.v1.AssetService.BatchGetAssetsHistory", + "service": { + "fullName": "google.cloud.asset.v1.AssetService", + "shortName": "AssetService" + }, + "shortName": "BatchGetAssetsHistory" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.asset_v1.types.BatchGetAssetsHistoryRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.asset_v1.types.BatchGetAssetsHistoryResponse", + "shortName": "batch_get_assets_history" + }, + "description": "Sample for BatchGetAssetsHistory", + "file": "cloudasset_v1_generated_asset_service_batch_get_assets_history_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudasset_v1_generated_AssetService_BatchGetAssetsHistory_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudasset_v1_generated_asset_service_batch_get_assets_history_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient", + "shortName": "AssetServiceAsyncClient" + }, + "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient.batch_get_effective_iam_policies", + "method": { + "fullName": "google.cloud.asset.v1.AssetService.BatchGetEffectiveIamPolicies", + "service": { + "fullName": "google.cloud.asset.v1.AssetService", + "shortName": "AssetService" + }, + "shortName": "BatchGetEffectiveIamPolicies" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.asset_v1.types.BatchGetEffectiveIamPoliciesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.asset_v1.types.BatchGetEffectiveIamPoliciesResponse", + "shortName": "batch_get_effective_iam_policies" + }, + "description": "Sample for BatchGetEffectiveIamPolicies", + "file": "cloudasset_v1_generated_asset_service_batch_get_effective_iam_policies_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudasset_v1_generated_AssetService_BatchGetEffectiveIamPolicies_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudasset_v1_generated_asset_service_batch_get_effective_iam_policies_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.asset_v1.AssetServiceClient", + "shortName": "AssetServiceClient" + }, + "fullName": "google.cloud.asset_v1.AssetServiceClient.batch_get_effective_iam_policies", + "method": { + 
"fullName": "google.cloud.asset.v1.AssetService.BatchGetEffectiveIamPolicies", + "service": { + "fullName": "google.cloud.asset.v1.AssetService", + "shortName": "AssetService" + }, + "shortName": "BatchGetEffectiveIamPolicies" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.asset_v1.types.BatchGetEffectiveIamPoliciesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.asset_v1.types.BatchGetEffectiveIamPoliciesResponse", + "shortName": "batch_get_effective_iam_policies" + }, + "description": "Sample for BatchGetEffectiveIamPolicies", + "file": "cloudasset_v1_generated_asset_service_batch_get_effective_iam_policies_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudasset_v1_generated_AssetService_BatchGetEffectiveIamPolicies_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudasset_v1_generated_asset_service_batch_get_effective_iam_policies_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient", + "shortName": "AssetServiceAsyncClient" + }, + "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient.create_feed", + "method": { + "fullName": "google.cloud.asset.v1.AssetService.CreateFeed", + "service": { + "fullName": "google.cloud.asset.v1.AssetService", + "shortName": "AssetService" + }, + "shortName": "CreateFeed" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.asset_v1.types.CreateFeedRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.asset_v1.types.Feed", + "shortName": "create_feed" + }, + "description": "Sample for CreateFeed", + "file": "cloudasset_v1_generated_asset_service_create_feed_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudasset_v1_generated_AssetService_CreateFeed_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudasset_v1_generated_asset_service_create_feed_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.asset_v1.AssetServiceClient", + "shortName": "AssetServiceClient" + }, + "fullName": "google.cloud.asset_v1.AssetServiceClient.create_feed", + "method": { + "fullName": "google.cloud.asset.v1.AssetService.CreateFeed", + "service": { + "fullName": "google.cloud.asset.v1.AssetService", + "shortName": "AssetService" + }, + "shortName": "CreateFeed" + }, + 
"parameters": [ + { + "name": "request", + "type": "google.cloud.asset_v1.types.CreateFeedRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.asset_v1.types.Feed", + "shortName": "create_feed" + }, + "description": "Sample for CreateFeed", + "file": "cloudasset_v1_generated_asset_service_create_feed_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudasset_v1_generated_AssetService_CreateFeed_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudasset_v1_generated_asset_service_create_feed_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient", + "shortName": "AssetServiceAsyncClient" + }, + "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient.create_saved_query", + "method": { + "fullName": "google.cloud.asset.v1.AssetService.CreateSavedQuery", + "service": { + "fullName": "google.cloud.asset.v1.AssetService", + "shortName": "AssetService" + }, + "shortName": "CreateSavedQuery" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.asset_v1.types.CreateSavedQueryRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "saved_query", + "type": "google.cloud.asset_v1.types.SavedQuery" + }, + { + "name": "saved_query_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.asset_v1.types.SavedQuery", + "shortName": "create_saved_query" + }, + "description": "Sample for CreateSavedQuery", + "file": "cloudasset_v1_generated_asset_service_create_saved_query_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudasset_v1_generated_AssetService_CreateSavedQuery_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudasset_v1_generated_asset_service_create_saved_query_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.asset_v1.AssetServiceClient", + "shortName": "AssetServiceClient" + }, + "fullName": "google.cloud.asset_v1.AssetServiceClient.create_saved_query", + "method": { + "fullName": "google.cloud.asset.v1.AssetService.CreateSavedQuery", + "service": { + "fullName": "google.cloud.asset.v1.AssetService", + "shortName": "AssetService" + }, + "shortName": "CreateSavedQuery" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.asset_v1.types.CreateSavedQueryRequest" + }, + { + "name": 
"parent", + "type": "str" + }, + { + "name": "saved_query", + "type": "google.cloud.asset_v1.types.SavedQuery" + }, + { + "name": "saved_query_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.asset_v1.types.SavedQuery", + "shortName": "create_saved_query" + }, + "description": "Sample for CreateSavedQuery", + "file": "cloudasset_v1_generated_asset_service_create_saved_query_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudasset_v1_generated_AssetService_CreateSavedQuery_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudasset_v1_generated_asset_service_create_saved_query_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient", + "shortName": "AssetServiceAsyncClient" + }, + "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient.delete_feed", + "method": { + "fullName": "google.cloud.asset.v1.AssetService.DeleteFeed", + "service": { + "fullName": "google.cloud.asset.v1.AssetService", + "shortName": "AssetService" + }, + "shortName": "DeleteFeed" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.asset_v1.types.DeleteFeedRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_feed" + }, + "description": "Sample for DeleteFeed", + "file": "cloudasset_v1_generated_asset_service_delete_feed_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudasset_v1_generated_AssetService_DeleteFeed_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudasset_v1_generated_asset_service_delete_feed_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.asset_v1.AssetServiceClient", + "shortName": "AssetServiceClient" + }, + "fullName": "google.cloud.asset_v1.AssetServiceClient.delete_feed", + "method": { + "fullName": "google.cloud.asset.v1.AssetService.DeleteFeed", + "service": { + "fullName": "google.cloud.asset.v1.AssetService", + "shortName": "AssetService" + }, + "shortName": "DeleteFeed" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.asset_v1.types.DeleteFeedRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_feed" + }, + 
"description": "Sample for DeleteFeed", + "file": "cloudasset_v1_generated_asset_service_delete_feed_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudasset_v1_generated_AssetService_DeleteFeed_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudasset_v1_generated_asset_service_delete_feed_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient", + "shortName": "AssetServiceAsyncClient" + }, + "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient.delete_saved_query", + "method": { + "fullName": "google.cloud.asset.v1.AssetService.DeleteSavedQuery", + "service": { + "fullName": "google.cloud.asset.v1.AssetService", + "shortName": "AssetService" + }, + "shortName": "DeleteSavedQuery" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.asset_v1.types.DeleteSavedQueryRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_saved_query" + }, + "description": "Sample for DeleteSavedQuery", + "file": "cloudasset_v1_generated_asset_service_delete_saved_query_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudasset_v1_generated_AssetService_DeleteSavedQuery_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudasset_v1_generated_asset_service_delete_saved_query_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.asset_v1.AssetServiceClient", + "shortName": "AssetServiceClient" + }, + "fullName": "google.cloud.asset_v1.AssetServiceClient.delete_saved_query", + "method": { + "fullName": "google.cloud.asset.v1.AssetService.DeleteSavedQuery", + "service": { + "fullName": "google.cloud.asset.v1.AssetService", + "shortName": "AssetService" + }, + "shortName": "DeleteSavedQuery" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.asset_v1.types.DeleteSavedQueryRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_saved_query" + }, + "description": "Sample for DeleteSavedQuery", + "file": "cloudasset_v1_generated_asset_service_delete_saved_query_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudasset_v1_generated_AssetService_DeleteSavedQuery_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": 
"CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudasset_v1_generated_asset_service_delete_saved_query_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient", + "shortName": "AssetServiceAsyncClient" + }, + "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient.export_assets", + "method": { + "fullName": "google.cloud.asset.v1.AssetService.ExportAssets", "service": { "fullName": "google.cloud.asset.v1.AssetService", "shortName": "AssetService" }, - "shortName": "BatchGetAssetsHistory" + "shortName": "ExportAssets" }, "parameters": [ { "name": "request", - "type": "google.cloud.asset_v1.types.BatchGetAssetsHistoryRequest" + "type": "google.cloud.asset_v1.types.ExportAssetsRequest" }, { "name": "retry", @@ -352,22 +1990,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.asset_v1.types.BatchGetAssetsHistoryResponse", - "shortName": "batch_get_assets_history" + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "export_assets" }, - "description": "Sample for BatchGetAssetsHistory", - "file": "cloudasset_v1_generated_asset_service_batch_get_assets_history_async.py", + "description": "Sample for ExportAssets", + "file": "cloudasset_v1_generated_asset_service_export_assets_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "cloudasset_v1_generated_AssetService_BatchGetAssetsHistory_async", + "regionTag": "cloudasset_v1_generated_AssetService_ExportAssets_async", "segments": [ { - "end": 51, + "end": 59, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 59, "start": 27, "type": "SHORT" }, @@ -377,22 +2015,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 49, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 56, + "start": 50, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 60, + "start": 57, "type": "RESPONSE_HANDLING" } ], - "title": "cloudasset_v1_generated_asset_service_batch_get_assets_history_async.py" + "title": "cloudasset_v1_generated_asset_service_export_assets_async.py" }, { "canonical": true, @@ -401,19 +2039,19 @@ "fullName": "google.cloud.asset_v1.AssetServiceClient", "shortName": "AssetServiceClient" }, - "fullName": "google.cloud.asset_v1.AssetServiceClient.batch_get_assets_history", + "fullName": "google.cloud.asset_v1.AssetServiceClient.export_assets", "method": { - "fullName": "google.cloud.asset.v1.AssetService.BatchGetAssetsHistory", + "fullName": "google.cloud.asset.v1.AssetService.ExportAssets", "service": { "fullName": "google.cloud.asset.v1.AssetService", "shortName": "AssetService" }, - "shortName": "BatchGetAssetsHistory" + "shortName": "ExportAssets" }, "parameters": [ { "name": "request", - "type": "google.cloud.asset_v1.types.BatchGetAssetsHistoryRequest" + "type": "google.cloud.asset_v1.types.ExportAssetsRequest" }, { "name": "retry", @@ -428,22 +2066,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.asset_v1.types.BatchGetAssetsHistoryResponse", - "shortName": "batch_get_assets_history" + "resultType": "google.api_core.operation.Operation", + "shortName": "export_assets" }, - "description": "Sample for BatchGetAssetsHistory", - "file": "cloudasset_v1_generated_asset_service_batch_get_assets_history_sync.py", + 
"description": "Sample for ExportAssets", + "file": "cloudasset_v1_generated_asset_service_export_assets_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "cloudasset_v1_generated_AssetService_BatchGetAssetsHistory_sync", + "regionTag": "cloudasset_v1_generated_AssetService_ExportAssets_sync", "segments": [ { - "end": 51, + "end": 59, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 59, "start": 27, "type": "SHORT" }, @@ -453,22 +2091,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 49, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 56, + "start": 50, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 60, + "start": 57, "type": "RESPONSE_HANDLING" } ], - "title": "cloudasset_v1_generated_asset_service_batch_get_assets_history_sync.py" + "title": "cloudasset_v1_generated_asset_service_export_assets_sync.py" }, { "canonical": true, @@ -478,22 +2116,22 @@ "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient", "shortName": "AssetServiceAsyncClient" }, - "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient.create_feed", + "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient.get_feed", "method": { - "fullName": "google.cloud.asset.v1.AssetService.CreateFeed", + "fullName": "google.cloud.asset.v1.AssetService.GetFeed", "service": { "fullName": "google.cloud.asset.v1.AssetService", "shortName": "AssetService" }, - "shortName": "CreateFeed" + "shortName": "GetFeed" }, "parameters": [ { "name": "request", - "type": "google.cloud.asset_v1.types.CreateFeedRequest" + "type": "google.cloud.asset_v1.types.GetFeedRequest" }, { - "name": "parent", + "name": "name", "type": "str" }, { @@ -510,21 +2148,21 @@ } ], "resultType": "google.cloud.asset_v1.types.Feed", - "shortName": "create_feed" + "shortName": "get_feed" }, - "description": "Sample for CreateFeed", - "file": "cloudasset_v1_generated_asset_service_create_feed_async.py", + "description": "Sample for GetFeed", + "file": "cloudasset_v1_generated_asset_service_get_feed_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "cloudasset_v1_generated_AssetService_CreateFeed_async", + "regionTag": "cloudasset_v1_generated_AssetService_GetFeed_async", "segments": [ { - "end": 56, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 56, + "end": 51, "start": 27, "type": "SHORT" }, @@ -534,22 +2172,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 50, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 53, - "start": 51, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 57, - "start": 54, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "cloudasset_v1_generated_asset_service_create_feed_async.py" + "title": "cloudasset_v1_generated_asset_service_get_feed_async.py" }, { "canonical": true, @@ -558,22 +2196,22 @@ "fullName": "google.cloud.asset_v1.AssetServiceClient", "shortName": "AssetServiceClient" }, - "fullName": "google.cloud.asset_v1.AssetServiceClient.create_feed", + "fullName": "google.cloud.asset_v1.AssetServiceClient.get_feed", "method": { - "fullName": "google.cloud.asset.v1.AssetService.CreateFeed", + "fullName": "google.cloud.asset.v1.AssetService.GetFeed", "service": { "fullName": "google.cloud.asset.v1.AssetService", "shortName": "AssetService" }, - "shortName": "CreateFeed" + "shortName": "GetFeed" }, "parameters": [ { "name": "request", - "type": "google.cloud.asset_v1.types.CreateFeedRequest" + "type": 
"google.cloud.asset_v1.types.GetFeedRequest" }, { - "name": "parent", + "name": "name", "type": "str" }, { @@ -590,21 +2228,21 @@ } ], "resultType": "google.cloud.asset_v1.types.Feed", - "shortName": "create_feed" + "shortName": "get_feed" }, - "description": "Sample for CreateFeed", - "file": "cloudasset_v1_generated_asset_service_create_feed_sync.py", + "description": "Sample for GetFeed", + "file": "cloudasset_v1_generated_asset_service_get_feed_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "cloudasset_v1_generated_AssetService_CreateFeed_sync", + "regionTag": "cloudasset_v1_generated_AssetService_GetFeed_sync", "segments": [ { - "end": 56, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 56, + "end": 51, "start": 27, "type": "SHORT" }, @@ -614,22 +2252,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 50, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 53, - "start": 51, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 57, - "start": 54, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "cloudasset_v1_generated_asset_service_create_feed_sync.py" + "title": "cloudasset_v1_generated_asset_service_get_feed_sync.py" }, { "canonical": true, @@ -639,19 +2277,19 @@ "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient", "shortName": "AssetServiceAsyncClient" }, - "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient.delete_feed", + "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient.get_saved_query", "method": { - "fullName": "google.cloud.asset.v1.AssetService.DeleteFeed", + "fullName": "google.cloud.asset.v1.AssetService.GetSavedQuery", "service": { "fullName": "google.cloud.asset.v1.AssetService", "shortName": "AssetService" }, - "shortName": "DeleteFeed" + "shortName": "GetSavedQuery" }, "parameters": [ { "name": "request", - "type": "google.cloud.asset_v1.types.DeleteFeedRequest" + "type": "google.cloud.asset_v1.types.GetSavedQueryRequest" }, { "name": "name", @@ -670,21 +2308,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "shortName": "delete_feed" + "resultType": "google.cloud.asset_v1.types.SavedQuery", + "shortName": "get_saved_query" }, - "description": "Sample for DeleteFeed", - "file": "cloudasset_v1_generated_asset_service_delete_feed_async.py", + "description": "Sample for GetSavedQuery", + "file": "cloudasset_v1_generated_asset_service_get_saved_query_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "cloudasset_v1_generated_AssetService_DeleteFeed_async", + "regionTag": "cloudasset_v1_generated_AssetService_GetSavedQuery_async", "segments": [ { - "end": 49, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 51, "start": 27, "type": "SHORT" }, @@ -699,15 +2338,17 @@ "type": "REQUEST_INITIALIZATION" }, { + "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 50, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "cloudasset_v1_generated_asset_service_delete_feed_async.py" + "title": "cloudasset_v1_generated_asset_service_get_saved_query_async.py" }, { "canonical": true, @@ -716,19 +2357,19 @@ "fullName": "google.cloud.asset_v1.AssetServiceClient", "shortName": "AssetServiceClient" }, - "fullName": "google.cloud.asset_v1.AssetServiceClient.delete_feed", + "fullName": "google.cloud.asset_v1.AssetServiceClient.get_saved_query", "method": { - "fullName": "google.cloud.asset.v1.AssetService.DeleteFeed", + "fullName": "google.cloud.asset.v1.AssetService.GetSavedQuery", "service": { 
"fullName": "google.cloud.asset.v1.AssetService", "shortName": "AssetService" }, - "shortName": "DeleteFeed" + "shortName": "GetSavedQuery" }, "parameters": [ { "name": "request", - "type": "google.cloud.asset_v1.types.DeleteFeedRequest" + "type": "google.cloud.asset_v1.types.GetSavedQueryRequest" }, { "name": "name", @@ -747,21 +2388,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "shortName": "delete_feed" + "resultType": "google.cloud.asset_v1.types.SavedQuery", + "shortName": "get_saved_query" }, - "description": "Sample for DeleteFeed", - "file": "cloudasset_v1_generated_asset_service_delete_feed_sync.py", + "description": "Sample for GetSavedQuery", + "file": "cloudasset_v1_generated_asset_service_get_saved_query_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "cloudasset_v1_generated_AssetService_DeleteFeed_sync", + "regionTag": "cloudasset_v1_generated_AssetService_GetSavedQuery_sync", "segments": [ { - "end": 49, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 51, "start": 27, "type": "SHORT" }, @@ -776,15 +2418,17 @@ "type": "REQUEST_INITIALIZATION" }, { + "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 50, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "cloudasset_v1_generated_asset_service_delete_feed_sync.py" + "title": "cloudasset_v1_generated_asset_service_get_saved_query_sync.py" }, { "canonical": true, @@ -794,19 +2438,23 @@ "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient", "shortName": "AssetServiceAsyncClient" }, - "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient.export_assets", + "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient.list_assets", "method": { - "fullName": "google.cloud.asset.v1.AssetService.ExportAssets", + "fullName": "google.cloud.asset.v1.AssetService.ListAssets", "service": { "fullName": "google.cloud.asset.v1.AssetService", "shortName": "AssetService" }, - "shortName": "ExportAssets" + "shortName": "ListAssets" }, "parameters": [ { "name": "request", - "type": "google.cloud.asset_v1.types.ExportAssetsRequest" + "type": "google.cloud.asset_v1.types.ListAssetsRequest" + }, + { + "name": "parent", + "type": "str" }, { "name": "retry", @@ -821,22 +2469,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "export_assets" + "resultType": "google.cloud.asset_v1.services.asset_service.pagers.ListAssetsAsyncPager", + "shortName": "list_assets" }, - "description": "Sample for ExportAssets", - "file": "cloudasset_v1_generated_asset_service_export_assets_async.py", + "description": "Sample for ListAssets", + "file": "cloudasset_v1_generated_asset_service_list_assets_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "cloudasset_v1_generated_AssetService_ExportAssets_async", + "regionTag": "cloudasset_v1_generated_AssetService_ListAssets_async", "segments": [ { - "end": 59, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 59, + "end": 52, "start": 27, "type": "SHORT" }, @@ -846,22 +2494,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 49, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 56, - "start": 50, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 60, - "start": 57, + "end": 53, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "cloudasset_v1_generated_asset_service_export_assets_async.py" + "title": "cloudasset_v1_generated_asset_service_list_assets_async.py" }, { "canonical": 
true, @@ -870,19 +2518,23 @@ "fullName": "google.cloud.asset_v1.AssetServiceClient", "shortName": "AssetServiceClient" }, - "fullName": "google.cloud.asset_v1.AssetServiceClient.export_assets", + "fullName": "google.cloud.asset_v1.AssetServiceClient.list_assets", "method": { - "fullName": "google.cloud.asset.v1.AssetService.ExportAssets", + "fullName": "google.cloud.asset.v1.AssetService.ListAssets", "service": { "fullName": "google.cloud.asset.v1.AssetService", "shortName": "AssetService" }, - "shortName": "ExportAssets" + "shortName": "ListAssets" }, "parameters": [ { "name": "request", - "type": "google.cloud.asset_v1.types.ExportAssetsRequest" + "type": "google.cloud.asset_v1.types.ListAssetsRequest" + }, + { + "name": "parent", + "type": "str" }, { "name": "retry", @@ -897,22 +2549,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.operation.Operation", - "shortName": "export_assets" + "resultType": "google.cloud.asset_v1.services.asset_service.pagers.ListAssetsPager", + "shortName": "list_assets" }, - "description": "Sample for ExportAssets", - "file": "cloudasset_v1_generated_asset_service_export_assets_sync.py", + "description": "Sample for ListAssets", + "file": "cloudasset_v1_generated_asset_service_list_assets_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "cloudasset_v1_generated_AssetService_ExportAssets_sync", + "regionTag": "cloudasset_v1_generated_AssetService_ListAssets_sync", "segments": [ { - "end": 59, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 59, + "end": 52, "start": 27, "type": "SHORT" }, @@ -922,22 +2574,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 49, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 56, - "start": 50, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 60, - "start": 57, + "end": 53, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "cloudasset_v1_generated_asset_service_export_assets_sync.py" + "title": "cloudasset_v1_generated_asset_service_list_assets_sync.py" }, { "canonical": true, @@ -947,22 +2599,22 @@ "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient", "shortName": "AssetServiceAsyncClient" }, - "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient.get_feed", + "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient.list_feeds", "method": { - "fullName": "google.cloud.asset.v1.AssetService.GetFeed", + "fullName": "google.cloud.asset.v1.AssetService.ListFeeds", "service": { "fullName": "google.cloud.asset.v1.AssetService", "shortName": "AssetService" }, - "shortName": "GetFeed" + "shortName": "ListFeeds" }, "parameters": [ { "name": "request", - "type": "google.cloud.asset_v1.types.GetFeedRequest" + "type": "google.cloud.asset_v1.types.ListFeedsRequest" }, { - "name": "name", + "name": "parent", "type": "str" }, { @@ -978,14 +2630,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.asset_v1.types.Feed", - "shortName": "get_feed" + "resultType": "google.cloud.asset_v1.types.ListFeedsResponse", + "shortName": "list_feeds" }, - "description": "Sample for GetFeed", - "file": "cloudasset_v1_generated_asset_service_get_feed_async.py", + "description": "Sample for ListFeeds", + "file": "cloudasset_v1_generated_asset_service_list_feeds_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "cloudasset_v1_generated_AssetService_GetFeed_async", + "regionTag": "cloudasset_v1_generated_AssetService_ListFeeds_async", "segments": [ { "end": 51, @@ -1018,7 +2670,7 @@ 
"type": "RESPONSE_HANDLING" } ], - "title": "cloudasset_v1_generated_asset_service_get_feed_async.py" + "title": "cloudasset_v1_generated_asset_service_list_feeds_async.py" }, { "canonical": true, @@ -1027,22 +2679,22 @@ "fullName": "google.cloud.asset_v1.AssetServiceClient", "shortName": "AssetServiceClient" }, - "fullName": "google.cloud.asset_v1.AssetServiceClient.get_feed", + "fullName": "google.cloud.asset_v1.AssetServiceClient.list_feeds", "method": { - "fullName": "google.cloud.asset.v1.AssetService.GetFeed", + "fullName": "google.cloud.asset.v1.AssetService.ListFeeds", "service": { "fullName": "google.cloud.asset.v1.AssetService", "shortName": "AssetService" }, - "shortName": "GetFeed" + "shortName": "ListFeeds" }, "parameters": [ { "name": "request", - "type": "google.cloud.asset_v1.types.GetFeedRequest" + "type": "google.cloud.asset_v1.types.ListFeedsRequest" }, { - "name": "name", + "name": "parent", "type": "str" }, { @@ -1058,14 +2710,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.asset_v1.types.Feed", - "shortName": "get_feed" + "resultType": "google.cloud.asset_v1.types.ListFeedsResponse", + "shortName": "list_feeds" }, - "description": "Sample for GetFeed", - "file": "cloudasset_v1_generated_asset_service_get_feed_sync.py", + "description": "Sample for ListFeeds", + "file": "cloudasset_v1_generated_asset_service_list_feeds_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "cloudasset_v1_generated_AssetService_GetFeed_sync", + "regionTag": "cloudasset_v1_generated_AssetService_ListFeeds_sync", "segments": [ { "end": 51, @@ -1098,7 +2750,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "cloudasset_v1_generated_asset_service_get_feed_sync.py" + "title": "cloudasset_v1_generated_asset_service_list_feeds_sync.py" }, { "canonical": true, @@ -1108,19 +2760,19 @@ "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient", "shortName": "AssetServiceAsyncClient" }, - "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient.list_assets", + "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient.list_saved_queries", "method": { - "fullName": "google.cloud.asset.v1.AssetService.ListAssets", + "fullName": "google.cloud.asset.v1.AssetService.ListSavedQueries", "service": { "fullName": "google.cloud.asset.v1.AssetService", "shortName": "AssetService" }, - "shortName": "ListAssets" + "shortName": "ListSavedQueries" }, "parameters": [ { "name": "request", - "type": "google.cloud.asset_v1.types.ListAssetsRequest" + "type": "google.cloud.asset_v1.types.ListSavedQueriesRequest" }, { "name": "parent", @@ -1139,14 +2791,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.asset_v1.services.asset_service.pagers.ListAssetsAsyncPager", - "shortName": "list_assets" + "resultType": "google.cloud.asset_v1.services.asset_service.pagers.ListSavedQueriesAsyncPager", + "shortName": "list_saved_queries" }, - "description": "Sample for ListAssets", - "file": "cloudasset_v1_generated_asset_service_list_assets_async.py", + "description": "Sample for ListSavedQueries", + "file": "cloudasset_v1_generated_asset_service_list_saved_queries_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "cloudasset_v1_generated_AssetService_ListAssets_async", + "regionTag": "cloudasset_v1_generated_AssetService_ListSavedQueries_async", "segments": [ { "end": 52, @@ -1179,7 +2831,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "cloudasset_v1_generated_asset_service_list_assets_async.py" + "title": 
"cloudasset_v1_generated_asset_service_list_saved_queries_async.py" }, { "canonical": true, @@ -1188,19 +2840,19 @@ "fullName": "google.cloud.asset_v1.AssetServiceClient", "shortName": "AssetServiceClient" }, - "fullName": "google.cloud.asset_v1.AssetServiceClient.list_assets", + "fullName": "google.cloud.asset_v1.AssetServiceClient.list_saved_queries", "method": { - "fullName": "google.cloud.asset.v1.AssetService.ListAssets", + "fullName": "google.cloud.asset.v1.AssetService.ListSavedQueries", "service": { "fullName": "google.cloud.asset.v1.AssetService", "shortName": "AssetService" }, - "shortName": "ListAssets" + "shortName": "ListSavedQueries" }, "parameters": [ { "name": "request", - "type": "google.cloud.asset_v1.types.ListAssetsRequest" + "type": "google.cloud.asset_v1.types.ListSavedQueriesRequest" }, { "name": "parent", @@ -1219,14 +2871,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.asset_v1.services.asset_service.pagers.ListAssetsPager", - "shortName": "list_assets" + "resultType": "google.cloud.asset_v1.services.asset_service.pagers.ListSavedQueriesPager", + "shortName": "list_saved_queries" }, - "description": "Sample for ListAssets", - "file": "cloudasset_v1_generated_asset_service_list_assets_sync.py", + "description": "Sample for ListSavedQueries", + "file": "cloudasset_v1_generated_asset_service_list_saved_queries_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "cloudasset_v1_generated_AssetService_ListAssets_sync", + "regionTag": "cloudasset_v1_generated_AssetService_ListSavedQueries_sync", "segments": [ { "end": 52, @@ -1259,7 +2911,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "cloudasset_v1_generated_asset_service_list_assets_sync.py" + "title": "cloudasset_v1_generated_asset_service_list_saved_queries_sync.py" }, { "canonical": true, @@ -1269,23 +2921,19 @@ "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient", "shortName": "AssetServiceAsyncClient" }, - "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient.list_feeds", + "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient.query_assets", "method": { - "fullName": "google.cloud.asset.v1.AssetService.ListFeeds", + "fullName": "google.cloud.asset.v1.AssetService.QueryAssets", "service": { "fullName": "google.cloud.asset.v1.AssetService", "shortName": "AssetService" }, - "shortName": "ListFeeds" + "shortName": "QueryAssets" }, "parameters": [ { "name": "request", - "type": "google.cloud.asset_v1.types.ListFeedsRequest" - }, - { - "name": "parent", - "type": "str" + "type": "google.cloud.asset_v1.types.QueryAssetsRequest" }, { "name": "retry", @@ -1300,22 +2948,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.asset_v1.types.ListFeedsResponse", - "shortName": "list_feeds" + "resultType": "google.cloud.asset_v1.types.QueryAssetsResponse", + "shortName": "query_assets" }, - "description": "Sample for ListFeeds", - "file": "cloudasset_v1_generated_asset_service_list_feeds_async.py", + "description": "Sample for QueryAssets", + "file": "cloudasset_v1_generated_asset_service_query_assets_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "cloudasset_v1_generated_AssetService_ListFeeds_async", + "regionTag": "cloudasset_v1_generated_AssetService_QueryAssets_async", "segments": [ { - "end": 51, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 52, "start": 27, "type": "SHORT" }, @@ -1325,22 +2973,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 46, "start": 41, "type": 
"REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 53, + "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "cloudasset_v1_generated_asset_service_list_feeds_async.py" + "title": "cloudasset_v1_generated_asset_service_query_assets_async.py" }, { "canonical": true, @@ -1349,23 +2997,19 @@ "fullName": "google.cloud.asset_v1.AssetServiceClient", "shortName": "AssetServiceClient" }, - "fullName": "google.cloud.asset_v1.AssetServiceClient.list_feeds", + "fullName": "google.cloud.asset_v1.AssetServiceClient.query_assets", "method": { - "fullName": "google.cloud.asset.v1.AssetService.ListFeeds", + "fullName": "google.cloud.asset.v1.AssetService.QueryAssets", "service": { "fullName": "google.cloud.asset.v1.AssetService", "shortName": "AssetService" }, - "shortName": "ListFeeds" + "shortName": "QueryAssets" }, "parameters": [ { "name": "request", - "type": "google.cloud.asset_v1.types.ListFeedsRequest" - }, - { - "name": "parent", - "type": "str" + "type": "google.cloud.asset_v1.types.QueryAssetsRequest" }, { "name": "retry", @@ -1380,22 +3024,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.asset_v1.types.ListFeedsResponse", - "shortName": "list_feeds" + "resultType": "google.cloud.asset_v1.types.QueryAssetsResponse", + "shortName": "query_assets" }, - "description": "Sample for ListFeeds", - "file": "cloudasset_v1_generated_asset_service_list_feeds_sync.py", + "description": "Sample for QueryAssets", + "file": "cloudasset_v1_generated_asset_service_query_assets_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "cloudasset_v1_generated_AssetService_ListFeeds_sync", + "regionTag": "cloudasset_v1_generated_AssetService_QueryAssets_sync", "segments": [ { - "end": 51, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 52, "start": 27, "type": "SHORT" }, @@ -1405,22 +3049,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 53, + "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "cloudasset_v1_generated_asset_service_list_feeds_sync.py" + "title": "cloudasset_v1_generated_asset_service_query_assets_sync.py" }, { "canonical": true, @@ -1928,6 +3572,175 @@ } ], "title": "cloudasset_v1_generated_asset_service_update_feed_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient", + "shortName": "AssetServiceAsyncClient" + }, + "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient.update_saved_query", + "method": { + "fullName": "google.cloud.asset.v1.AssetService.UpdateSavedQuery", + "service": { + "fullName": "google.cloud.asset.v1.AssetService", + "shortName": "AssetService" + }, + "shortName": "UpdateSavedQuery" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.asset_v1.types.UpdateSavedQueryRequest" + }, + { + "name": "saved_query", + "type": "google.cloud.asset_v1.types.SavedQuery" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.cloud.asset_v1.types.SavedQuery", + "shortName": "update_saved_query" + }, + "description": "Sample for UpdateSavedQuery", + "file": "cloudasset_v1_generated_asset_service_update_saved_query_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudasset_v1_generated_AssetService_UpdateSavedQuery_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudasset_v1_generated_asset_service_update_saved_query_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.asset_v1.AssetServiceClient", + "shortName": "AssetServiceClient" + }, + "fullName": "google.cloud.asset_v1.AssetServiceClient.update_saved_query", + "method": { + "fullName": "google.cloud.asset.v1.AssetService.UpdateSavedQuery", + "service": { + "fullName": "google.cloud.asset.v1.AssetService", + "shortName": "AssetService" + }, + "shortName": "UpdateSavedQuery" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.asset_v1.types.UpdateSavedQueryRequest" + }, + { + "name": "saved_query", + "type": "google.cloud.asset_v1.types.SavedQuery" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.asset_v1.types.SavedQuery", + "shortName": "update_saved_query" + }, + "description": "Sample for UpdateSavedQuery", + "file": "cloudasset_v1_generated_asset_service_update_saved_query_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudasset_v1_generated_AssetService_UpdateSavedQuery_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudasset_v1_generated_asset_service_update_saved_query_sync.py" } ] } diff --git a/tests/integration/goldens/asset/scripts/fixup_asset_v1_keywords.py b/tests/integration/goldens/asset/scripts/fixup_asset_v1_keywords.py index 631ec8e32a..979373d925 100755 --- a/tests/integration/goldens/asset/scripts/fixup_asset_v1_keywords.py +++ b/tests/integration/goldens/asset/scripts/fixup_asset_v1_keywords.py @@ -39,18 +39,29 @@ def partition( class assetCallTransformer(cst.CSTTransformer): CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'analyze_iam_policy': ('analysis_query', 'execution_timeout', ), - 'analyze_iam_policy_longrunning': ('analysis_query', 'output_config', ), - 'batch_get_assets_history': ('parent', 'asset_names', 'content_type', 'read_time_window', ), + 'analyze_iam_policy': ('analysis_query', 'saved_analysis_query', 'execution_timeout', ), + 'analyze_iam_policy_longrunning': ('analysis_query', 'output_config', 'saved_analysis_query', ), + 
'analyze_move': ('resource', 'destination_parent', 'view', ), + 'analyze_org_policies': ('scope', 'constraint', 'filter', 'page_size', 'page_token', ), + 'analyze_org_policy_governed_assets': ('scope', 'constraint', 'filter', 'page_size', 'page_token', ), + 'analyze_org_policy_governed_containers': ('scope', 'constraint', 'filter', 'page_size', 'page_token', ), + 'batch_get_assets_history': ('parent', 'asset_names', 'content_type', 'read_time_window', 'relationship_types', ), + 'batch_get_effective_iam_policies': ('scope', 'names', ), 'create_feed': ('parent', 'feed_id', 'feed', ), + 'create_saved_query': ('parent', 'saved_query', 'saved_query_id', ), 'delete_feed': ('name', ), - 'export_assets': ('parent', 'output_config', 'read_time', 'asset_types', 'content_type', ), + 'delete_saved_query': ('name', ), + 'export_assets': ('parent', 'output_config', 'read_time', 'asset_types', 'content_type', 'relationship_types', ), 'get_feed': ('name', ), - 'list_assets': ('parent', 'read_time', 'asset_types', 'content_type', 'page_size', 'page_token', ), + 'get_saved_query': ('name', ), + 'list_assets': ('parent', 'read_time', 'asset_types', 'content_type', 'page_size', 'page_token', 'relationship_types', ), 'list_feeds': ('parent', ), + 'list_saved_queries': ('parent', 'filter', 'page_size', 'page_token', ), + 'query_assets': ('parent', 'statement', 'job_reference', 'page_size', 'page_token', 'timeout', 'read_time_window', 'read_time', 'output_config', ), 'search_all_iam_policies': ('scope', 'query', 'page_size', 'page_token', 'asset_types', 'order_by', ), - 'search_all_resources': ('scope', 'query', 'asset_types', 'page_size', 'page_token', 'order_by', ), + 'search_all_resources': ('scope', 'query', 'asset_types', 'page_size', 'page_token', 'order_by', 'read_mask', ), 'update_feed': ('feed', 'update_mask', ), + 'update_saved_query': ('saved_query', 'update_mask', ), } def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: diff --git a/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py b/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py index 4a12e28e3c..6bb3ed9ed9 100755 --- a/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py +++ b/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py @@ -53,11 +53,12 @@ from google.cloud.asset_v1.services.asset_service import transports from google.cloud.asset_v1.types import asset_service from google.cloud.asset_v1.types import assets -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account from google.protobuf import duration_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore from google.type import expr_pb2 # type: ignore import google.auth @@ -1289,6 +1290,7 @@ def test_create_feed(request_type, transport: str = 'grpc'): asset_names=['asset_names_value'], asset_types=['asset_types_value'], content_type=asset_service.ContentType.RESOURCE, + relationship_types=['relationship_types_value'], ) response = client.create_feed(request) @@ -1303,6 +1305,7 @@ def test_create_feed(request_type, transport: str = 'grpc'): assert response.asset_names == ['asset_names_value'] assert response.asset_types == ['asset_types_value'] assert response.content_type == asset_service.ContentType.RESOURCE + assert 
response.relationship_types == ['relationship_types_value'] def test_create_feed_empty_call(): @@ -1343,6 +1346,7 @@ async def test_create_feed_async(transport: str = 'grpc_asyncio', request_type=a asset_names=['asset_names_value'], asset_types=['asset_types_value'], content_type=asset_service.ContentType.RESOURCE, + relationship_types=['relationship_types_value'], )) response = await client.create_feed(request) @@ -1357,6 +1361,7 @@ async def test_create_feed_async(transport: str = 'grpc_asyncio', request_type=a assert response.asset_names == ['asset_names_value'] assert response.asset_types == ['asset_types_value'] assert response.content_type == asset_service.ContentType.RESOURCE + assert response.relationship_types == ['relationship_types_value'] @pytest.mark.asyncio @@ -1533,6 +1538,7 @@ def test_get_feed(request_type, transport: str = 'grpc'): asset_names=['asset_names_value'], asset_types=['asset_types_value'], content_type=asset_service.ContentType.RESOURCE, + relationship_types=['relationship_types_value'], ) response = client.get_feed(request) @@ -1547,6 +1553,7 @@ def test_get_feed(request_type, transport: str = 'grpc'): assert response.asset_names == ['asset_names_value'] assert response.asset_types == ['asset_types_value'] assert response.content_type == asset_service.ContentType.RESOURCE + assert response.relationship_types == ['relationship_types_value'] def test_get_feed_empty_call(): @@ -1587,6 +1594,7 @@ async def test_get_feed_async(transport: str = 'grpc_asyncio', request_type=asse asset_names=['asset_names_value'], asset_types=['asset_types_value'], content_type=asset_service.ContentType.RESOURCE, + relationship_types=['relationship_types_value'], )) response = await client.get_feed(request) @@ -1601,6 +1609,7 @@ async def test_get_feed_async(transport: str = 'grpc_asyncio', request_type=asse assert response.asset_names == ['asset_names_value'] assert response.asset_types == ['asset_types_value'] assert response.content_type == asset_service.ContentType.RESOURCE + assert response.relationship_types == ['relationship_types_value'] @pytest.mark.asyncio @@ -2005,6 +2014,7 @@ def test_update_feed(request_type, transport: str = 'grpc'): asset_names=['asset_names_value'], asset_types=['asset_types_value'], content_type=asset_service.ContentType.RESOURCE, + relationship_types=['relationship_types_value'], ) response = client.update_feed(request) @@ -2019,6 +2029,7 @@ def test_update_feed(request_type, transport: str = 'grpc'): assert response.asset_names == ['asset_names_value'] assert response.asset_types == ['asset_types_value'] assert response.content_type == asset_service.ContentType.RESOURCE + assert response.relationship_types == ['relationship_types_value'] def test_update_feed_empty_call(): @@ -2059,6 +2070,7 @@ async def test_update_feed_async(transport: str = 'grpc_asyncio', request_type=a asset_names=['asset_names_value'], asset_types=['asset_types_value'], content_type=asset_service.ContentType.RESOURCE, + relationship_types=['relationship_types_value'], )) response = await client.update_feed(request) @@ -2073,6 +2085,7 @@ async def test_update_feed_async(transport: str = 'grpc_asyncio', request_type=a assert response.asset_names == ['asset_names_value'] assert response.asset_types == ['asset_types_value'] assert response.content_type == asset_service.ContentType.RESOURCE + assert response.relationship_types == ['relationship_types_value'] @pytest.mark.asyncio @@ -3625,43 +3638,5637 @@ async def test_analyze_iam_policy_longrunning_field_headers_async(): ) in 
kw['metadata'] +@pytest.mark.parametrize("request_type", [ + asset_service.AnalyzeMoveRequest, + dict, +]) +def test_analyze_move(request_type, transport: str = 'grpc'): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_move), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = asset_service.AnalyzeMoveResponse( + ) + response = client.analyze_move(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.AnalyzeMoveRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, asset_service.AnalyzeMoveResponse) + + +def test_analyze_move_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_move), + '__call__') as call: + client.analyze_move() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.AnalyzeMoveRequest() + +@pytest.mark.asyncio +async def test_analyze_move_async(transport: str = 'grpc_asyncio', request_type=asset_service.AnalyzeMoveRequest): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_move), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(asset_service.AnalyzeMoveResponse( + )) + response = await client.analyze_move(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.AnalyzeMoveRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, asset_service.AnalyzeMoveResponse) + + +@pytest.mark.asyncio +async def test_analyze_move_async_from_dict(): + await test_analyze_move_async(request_type=dict) + + +def test_analyze_move_field_headers(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = asset_service.AnalyzeMoveRequest() + + request.resource = 'resource_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_move), + '__call__') as call: + call.return_value = asset_service.AnalyzeMoveResponse() + client.analyze_move(request) + + # Establish that the underlying gRPC stub method was called. 
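+ # (Each mock_calls entry unpacks as (name, args, kwargs); args[0] is + # the request message the client handed to the stub.)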
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'resource=resource_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_analyze_move_field_headers_async(): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = asset_service.AnalyzeMoveRequest() + + request.resource = 'resource_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_move), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.AnalyzeMoveResponse()) + await client.analyze_move(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'resource=resource_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + asset_service.QueryAssetsRequest, + dict, +]) +def test_query_assets(request_type, transport: str = 'grpc'): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.query_assets), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = asset_service.QueryAssetsResponse( + job_reference='job_reference_value', + done=True, + ) + response = client.query_assets(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.QueryAssetsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, asset_service.QueryAssetsResponse) + assert response.job_reference == 'job_reference_value' + assert response.done is True + + +def test_query_assets_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.query_assets), + '__call__') as call: + client.query_assets() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.QueryAssetsRequest() + +@pytest.mark.asyncio +async def test_query_assets_async(transport: str = 'grpc_asyncio', request_type=asset_service.QueryAssetsRequest): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
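+ # (An argument-less request_type() is still a valid message: proto3 + # gives every unset field its default value.)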
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.query_assets), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(asset_service.QueryAssetsResponse( + job_reference='job_reference_value', + done=True, + )) + response = await client.query_assets(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.QueryAssetsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, asset_service.QueryAssetsResponse) + assert response.job_reference == 'job_reference_value' + assert response.done is True + + +@pytest.mark.asyncio +async def test_query_assets_async_from_dict(): + await test_query_assets_async(request_type=dict) + + +def test_query_assets_field_headers(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = asset_service.QueryAssetsRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.query_assets), + '__call__') as call: + call.return_value = asset_service.QueryAssetsResponse() + client.query_assets(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_query_assets_field_headers_async(): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = asset_service.QueryAssetsRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.query_assets), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.QueryAssetsResponse()) + await client.query_assets(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + asset_service.CreateSavedQueryRequest, + dict, +]) +def test_create_saved_query(request_type, transport: str = 'grpc'): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_saved_query), + '__call__') as call: + # Designate an appropriate return value for the call. 
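+ # Only scalar fields are populated here; the assertions below verify + # that each one survives the round trip through the client unchanged.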
+ call.return_value = asset_service.SavedQuery( + name='name_value', + description='description_value', + creator='creator_value', + last_updater='last_updater_value', + ) + response = client.create_saved_query(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.CreateSavedQueryRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, asset_service.SavedQuery) + assert response.name == 'name_value' + assert response.description == 'description_value' + assert response.creator == 'creator_value' + assert response.last_updater == 'last_updater_value' + + +def test_create_saved_query_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_saved_query), + '__call__') as call: + client.create_saved_query() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.CreateSavedQueryRequest() + +@pytest.mark.asyncio +async def test_create_saved_query_async(transport: str = 'grpc_asyncio', request_type=asset_service.CreateSavedQueryRequest): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_saved_query), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SavedQuery( + name='name_value', + description='description_value', + creator='creator_value', + last_updater='last_updater_value', + )) + response = await client.create_saved_query(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.CreateSavedQueryRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, asset_service.SavedQuery) + assert response.name == 'name_value' + assert response.description == 'description_value' + assert response.creator == 'creator_value' + assert response.last_updater == 'last_updater_value' + + +@pytest.mark.asyncio +async def test_create_saved_query_async_from_dict(): + await test_create_saved_query_async(request_type=dict) + + +def test_create_saved_query_field_headers(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = asset_service.CreateSavedQueryRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_saved_query), + '__call__') as call: + call.return_value = asset_service.SavedQuery() + client.create_saved_query(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_saved_query_field_headers_async(): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = asset_service.CreateSavedQueryRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_saved_query), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SavedQuery()) + await client.create_saved_query(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_create_saved_query_flattened(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_saved_query), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = asset_service.SavedQuery() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_saved_query( + parent='parent_value', + saved_query=asset_service.SavedQuery(name='name_value'), + saved_query_id='saved_query_id_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].saved_query + mock_val = asset_service.SavedQuery(name='name_value') + assert arg == mock_val + arg = args[0].saved_query_id + mock_val = 'saved_query_id_value' + assert arg == mock_val + + +def test_create_saved_query_flattened_error(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_saved_query( + asset_service.CreateSavedQueryRequest(), + parent='parent_value', + saved_query=asset_service.SavedQuery(name='name_value'), + saved_query_id='saved_query_id_value', + ) + +@pytest.mark.asyncio +async def test_create_saved_query_flattened_async(): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_saved_query), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = asset_service.SavedQuery() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SavedQuery()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
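+ # The client merges these keyword arguments into a single + # CreateSavedQueryRequest before invoking the transport.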
+ response = await client.create_saved_query( + parent='parent_value', + saved_query=asset_service.SavedQuery(name='name_value'), + saved_query_id='saved_query_id_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].saved_query + mock_val = asset_service.SavedQuery(name='name_value') + assert arg == mock_val + arg = args[0].saved_query_id + mock_val = 'saved_query_id_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_create_saved_query_flattened_error_async(): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_saved_query( + asset_service.CreateSavedQueryRequest(), + parent='parent_value', + saved_query=asset_service.SavedQuery(name='name_value'), + saved_query_id='saved_query_id_value', + ) + + +@pytest.mark.parametrize("request_type", [ + asset_service.GetSavedQueryRequest, + dict, +]) +def test_get_saved_query(request_type, transport: str = 'grpc'): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_saved_query), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = asset_service.SavedQuery( + name='name_value', + description='description_value', + creator='creator_value', + last_updater='last_updater_value', + ) + response = client.get_saved_query(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.GetSavedQueryRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, asset_service.SavedQuery) + assert response.name == 'name_value' + assert response.description == 'description_value' + assert response.creator == 'creator_value' + assert response.last_updater == 'last_updater_value' + + +def test_get_saved_query_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_saved_query), + '__call__') as call: + client.get_saved_query() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.GetSavedQueryRequest() + +@pytest.mark.asyncio +async def test_get_saved_query_async(transport: str = 'grpc_asyncio', request_type=asset_service.GetSavedQueryRequest): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_saved_query), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SavedQuery( + name='name_value', + description='description_value', + creator='creator_value', + last_updater='last_updater_value', + )) + response = await client.get_saved_query(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.GetSavedQueryRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, asset_service.SavedQuery) + assert response.name == 'name_value' + assert response.description == 'description_value' + assert response.creator == 'creator_value' + assert response.last_updater == 'last_updater_value' + + +@pytest.mark.asyncio +async def test_get_saved_query_async_from_dict(): + await test_get_saved_query_async(request_type=dict) + + +def test_get_saved_query_field_headers(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = asset_service.GetSavedQueryRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_saved_query), + '__call__') as call: + call.return_value = asset_service.SavedQuery() + client.get_saved_query(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_saved_query_field_headers_async(): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = asset_service.GetSavedQueryRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_saved_query), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SavedQuery()) + await client.get_saved_query(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_get_saved_query_flattened(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_saved_query), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = asset_service.SavedQuery() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.get_saved_query( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_get_saved_query_flattened_error(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_saved_query( + asset_service.GetSavedQueryRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_get_saved_query_flattened_async(): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_saved_query), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = asset_service.SavedQuery() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SavedQuery()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_saved_query( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_get_saved_query_flattened_error_async(): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_saved_query( + asset_service.GetSavedQueryRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + asset_service.ListSavedQueriesRequest, + dict, +]) +def test_list_saved_queries(request_type, transport: str = 'grpc'): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_saved_queries), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = asset_service.ListSavedQueriesResponse( + next_page_token='next_page_token_value', + ) + response = client.list_saved_queries(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.ListSavedQueriesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListSavedQueriesPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_saved_queries_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
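+ # (With no request and no flattened fields, the client falls back to a + # default ListSavedQueriesRequest, which is what the stub should see.)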
+ client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_saved_queries), + '__call__') as call: + client.list_saved_queries() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.ListSavedQueriesRequest() + +@pytest.mark.asyncio +async def test_list_saved_queries_async(transport: str = 'grpc_asyncio', request_type=asset_service.ListSavedQueriesRequest): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_saved_queries), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(asset_service.ListSavedQueriesResponse( + next_page_token='next_page_token_value', + )) + response = await client.list_saved_queries(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.ListSavedQueriesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListSavedQueriesAsyncPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.asyncio +async def test_list_saved_queries_async_from_dict(): + await test_list_saved_queries_async(request_type=dict) + + +def test_list_saved_queries_field_headers(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = asset_service.ListSavedQueriesRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_saved_queries), + '__call__') as call: + call.return_value = asset_service.ListSavedQueriesResponse() + client.list_saved_queries(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_saved_queries_field_headers_async(): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = asset_service.ListSavedQueriesRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_saved_queries), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.ListSavedQueriesResponse()) + await client.list_saved_queries(request) + + # Establish that the underlying gRPC stub method was called. 
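+ # (The async variants assert only that at least one call was recorded; + # the unpacking below still inspects the first call.)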
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_list_saved_queries_flattened(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_saved_queries), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = asset_service.ListSavedQueriesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_saved_queries( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + + +def test_list_saved_queries_flattened_error(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_saved_queries( + asset_service.ListSavedQueriesRequest(), + parent='parent_value', + ) + +@pytest.mark.asyncio +async def test_list_saved_queries_flattened_async(): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_saved_queries), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = asset_service.ListSavedQueriesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.ListSavedQueriesResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_saved_queries( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_list_saved_queries_flattened_error_async(): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_saved_queries( + asset_service.ListSavedQueriesRequest(), + parent='parent_value', + ) + + +def test_list_saved_queries_pager(transport_name: str = "grpc"): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_saved_queries), + '__call__') as call: + # Set the response to a series of pages. 
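+ # side_effect consumes this tuple one element per call: four pages of + # results, then a RuntimeError sentinel that fails the test if the + # pager ever requests a page past the last one.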
+ call.side_effect = ( + asset_service.ListSavedQueriesResponse( + saved_queries=[ + asset_service.SavedQuery(), + asset_service.SavedQuery(), + asset_service.SavedQuery(), + ], + next_page_token='abc', + ), + asset_service.ListSavedQueriesResponse( + saved_queries=[], + next_page_token='def', + ), + asset_service.ListSavedQueriesResponse( + saved_queries=[ + asset_service.SavedQuery(), + ], + next_page_token='ghi', + ), + asset_service.ListSavedQueriesResponse( + saved_queries=[ + asset_service.SavedQuery(), + asset_service.SavedQuery(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client.list_saved_queries(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, asset_service.SavedQuery) + for i in results) +def test_list_saved_queries_pages(transport_name: str = "grpc"): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_saved_queries), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + asset_service.ListSavedQueriesResponse( + saved_queries=[ + asset_service.SavedQuery(), + asset_service.SavedQuery(), + asset_service.SavedQuery(), + ], + next_page_token='abc', + ), + asset_service.ListSavedQueriesResponse( + saved_queries=[], + next_page_token='def', + ), + asset_service.ListSavedQueriesResponse( + saved_queries=[ + asset_service.SavedQuery(), + ], + next_page_token='ghi', + ), + asset_service.ListSavedQueriesResponse( + saved_queries=[ + asset_service.SavedQuery(), + asset_service.SavedQuery(), + ], + ), + RuntimeError, + ) + pages = list(client.list_saved_queries(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_saved_queries_async_pager(): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_saved_queries), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
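+ # Same four-page fixture as the sync pager test above; the AsyncMock + # returns each page in turn as the async pager awaits it.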
+ call.side_effect = ( + asset_service.ListSavedQueriesResponse( + saved_queries=[ + asset_service.SavedQuery(), + asset_service.SavedQuery(), + asset_service.SavedQuery(), + ], + next_page_token='abc', + ), + asset_service.ListSavedQueriesResponse( + saved_queries=[], + next_page_token='def', + ), + asset_service.ListSavedQueriesResponse( + saved_queries=[ + asset_service.SavedQuery(), + ], + next_page_token='ghi', + ), + asset_service.ListSavedQueriesResponse( + saved_queries=[ + asset_service.SavedQuery(), + asset_service.SavedQuery(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_saved_queries(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, asset_service.SavedQuery) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_saved_queries_async_pages(): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_saved_queries), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + asset_service.ListSavedQueriesResponse( + saved_queries=[ + asset_service.SavedQuery(), + asset_service.SavedQuery(), + asset_service.SavedQuery(), + ], + next_page_token='abc', + ), + asset_service.ListSavedQueriesResponse( + saved_queries=[], + next_page_token='def', + ), + asset_service.ListSavedQueriesResponse( + saved_queries=[ + asset_service.SavedQuery(), + ], + next_page_token='ghi', + ), + asset_service.ListSavedQueriesResponse( + saved_queries=[ + asset_service.SavedQuery(), + asset_service.SavedQuery(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_saved_queries(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + asset_service.UpdateSavedQueryRequest, + dict, +]) +def test_update_saved_query(request_type, transport: str = 'grpc'): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_saved_query), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = asset_service.SavedQuery( + name='name_value', + description='description_value', + creator='creator_value', + last_updater='last_updater_value', + ) + response = client.update_saved_query(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.UpdateSavedQueryRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, asset_service.SavedQuery) + assert response.name == 'name_value' + assert response.description == 'description_value' + assert response.creator == 'creator_value' + assert response.last_updater == 'last_updater_value' + + +def test_update_saved_query_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_saved_query), + '__call__') as call: + client.update_saved_query() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.UpdateSavedQueryRequest() + +@pytest.mark.asyncio +async def test_update_saved_query_async(transport: str = 'grpc_asyncio', request_type=asset_service.UpdateSavedQueryRequest): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_saved_query), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SavedQuery( + name='name_value', + description='description_value', + creator='creator_value', + last_updater='last_updater_value', + )) + response = await client.update_saved_query(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.UpdateSavedQueryRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, asset_service.SavedQuery) + assert response.name == 'name_value' + assert response.description == 'description_value' + assert response.creator == 'creator_value' + assert response.last_updater == 'last_updater_value' + + +@pytest.mark.asyncio +async def test_update_saved_query_async_from_dict(): + await test_update_saved_query_async(request_type=dict) + + +def test_update_saved_query_field_headers(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = asset_service.UpdateSavedQueryRequest() + + request.saved_query.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_saved_query), + '__call__') as call: + call.return_value = asset_service.SavedQuery() + client.update_saved_query(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
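+ # kw['metadata'] carries the routing header; for UpdateSavedQuery it is + # keyed on the nested saved_query.name field rather than a top-level + # request field.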
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'saved_query.name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_saved_query_field_headers_async(): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = asset_service.UpdateSavedQueryRequest() + + request.saved_query.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_saved_query), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SavedQuery()) + await client.update_saved_query(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'saved_query.name=name_value', + ) in kw['metadata'] + + +def test_update_saved_query_flattened(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_saved_query), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = asset_service.SavedQuery() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_saved_query( + saved_query=asset_service.SavedQuery(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].saved_query + mock_val = asset_service.SavedQuery(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + + +def test_update_saved_query_flattened_error(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_saved_query( + asset_service.UpdateSavedQueryRequest(), + saved_query=asset_service.SavedQuery(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + +@pytest.mark.asyncio +async def test_update_saved_query_flattened_async(): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_saved_query), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = asset_service.SavedQuery() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SavedQuery()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
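+ # The FieldMask paths name which SavedQuery fields the update should + # touch; both flattened values must land on the outgoing request.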
+ response = await client.update_saved_query( + saved_query=asset_service.SavedQuery(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].saved_query + mock_val = asset_service.SavedQuery(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + +@pytest.mark.asyncio +async def test_update_saved_query_flattened_error_async(): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_saved_query( + asset_service.UpdateSavedQueryRequest(), + saved_query=asset_service.SavedQuery(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +@pytest.mark.parametrize("request_type", [ + asset_service.DeleteSavedQueryRequest, + dict, +]) +def test_delete_saved_query(request_type, transport: str = 'grpc'): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_saved_query), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_saved_query(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.DeleteSavedQueryRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_saved_query_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_saved_query), + '__call__') as call: + client.delete_saved_query() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.DeleteSavedQueryRequest() + +@pytest.mark.asyncio +async def test_delete_saved_query_async(transport: str = 'grpc_asyncio', request_type=asset_service.DeleteSavedQueryRequest): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_saved_query), + '__call__') as call: + # Designate an appropriate return value for the call. 
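+ # DeleteSavedQuery has no response payload, so the fake call resolves + # to None and the client is expected to surface None to the caller.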
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_saved_query(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.DeleteSavedQueryRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_saved_query_async_from_dict(): + await test_delete_saved_query_async(request_type=dict) + + +def test_delete_saved_query_field_headers(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = asset_service.DeleteSavedQueryRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_saved_query), + '__call__') as call: + call.return_value = None + client.delete_saved_query(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_saved_query_field_headers_async(): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = asset_service.DeleteSavedQueryRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_saved_query), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_saved_query(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_delete_saved_query_flattened(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_saved_query), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_saved_query( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_delete_saved_query_flattened_error(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
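+    # (Mixing the request-object and flattened-field calling conventions is
+    # ambiguous about which value should win, so the client raises ValueError.)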
+    with pytest.raises(ValueError):
+        client.delete_saved_query(
+            asset_service.DeleteSavedQueryRequest(),
+            name='name_value',
+        )
+
+@pytest.mark.asyncio
+async def test_delete_saved_query_flattened_async():
+    client = AssetServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_saved_query),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.delete_saved_query(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_delete_saved_query_flattened_error_async():
+    client = AssetServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.delete_saved_query(
+            asset_service.DeleteSavedQueryRequest(),
+            name='name_value',
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+    asset_service.BatchGetEffectiveIamPoliciesRequest,
+    dict,
+])
+def test_batch_get_effective_iam_policies(request_type, transport: str = 'grpc'):
+    client = AssetServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.batch_get_effective_iam_policies),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = asset_service.BatchGetEffectiveIamPoliciesResponse(
+        )
+        response = client.batch_get_effective_iam_policies(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == asset_service.BatchGetEffectiveIamPoliciesRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, asset_service.BatchGetEffectiveIamPoliciesResponse)
+
+
+def test_batch_get_effective_iam_policies_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = AssetServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
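+    # Invoking the method with no arguments should still send a well-formed,
+    # default BatchGetEffectiveIamPoliciesRequest, as asserted below.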
+    with mock.patch.object(
+            type(client.transport.batch_get_effective_iam_policies),
+            '__call__') as call:
+        client.batch_get_effective_iam_policies()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == asset_service.BatchGetEffectiveIamPoliciesRequest()
+
+@pytest.mark.asyncio
+async def test_batch_get_effective_iam_policies_async(transport: str = 'grpc_asyncio', request_type=asset_service.BatchGetEffectiveIamPoliciesRequest):
+    client = AssetServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.batch_get_effective_iam_policies),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.BatchGetEffectiveIamPoliciesResponse(
+        ))
+        response = await client.batch_get_effective_iam_policies(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == asset_service.BatchGetEffectiveIamPoliciesRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, asset_service.BatchGetEffectiveIamPoliciesResponse)
+
+
+@pytest.mark.asyncio
+async def test_batch_get_effective_iam_policies_async_from_dict():
+    await test_batch_get_effective_iam_policies_async(request_type=dict)
+
+
+def test_batch_get_effective_iam_policies_field_headers():
+    client = AssetServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = asset_service.BatchGetEffectiveIamPoliciesRequest()
+
+    request.scope = 'scope_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.batch_get_effective_iam_policies),
+            '__call__') as call:
+        call.return_value = asset_service.BatchGetEffectiveIamPoliciesResponse()
+        client.batch_get_effective_iam_policies(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'scope=scope_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_batch_get_effective_iam_policies_field_headers_async():
+    client = AssetServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = asset_service.BatchGetEffectiveIamPoliciesRequest()
+
+    request.scope = 'scope_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.batch_get_effective_iam_policies),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.BatchGetEffectiveIamPoliciesResponse())
+        await client.batch_get_effective_iam_policies(request)
+
+        # Establish that the underlying gRPC stub method was called.
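+        # (Note: the async variants assert only that at least one call was
+        # recorded, whereas the corresponding sync tests assert exactly one.)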
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'scope=scope_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.parametrize("request_type", [
+    asset_service.AnalyzeOrgPoliciesRequest,
+    dict,
+])
+def test_analyze_org_policies(request_type, transport: str = 'grpc'):
+    client = AssetServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.analyze_org_policies),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = asset_service.AnalyzeOrgPoliciesResponse(
+            next_page_token='next_page_token_value',
+        )
+        response = client.analyze_org_policies(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == asset_service.AnalyzeOrgPoliciesRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.AnalyzeOrgPoliciesPager)
+    assert response.next_page_token == 'next_page_token_value'
+
+
+def test_analyze_org_policies_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = AssetServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.analyze_org_policies),
+            '__call__') as call:
+        client.analyze_org_policies()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == asset_service.AnalyzeOrgPoliciesRequest()
+
+@pytest.mark.asyncio
+async def test_analyze_org_policies_async(transport: str = 'grpc_asyncio', request_type=asset_service.AnalyzeOrgPoliciesRequest):
+    client = AssetServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.analyze_org_policies),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.AnalyzeOrgPoliciesResponse(
+            next_page_token='next_page_token_value',
+        ))
+        response = await client.analyze_org_policies(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == asset_service.AnalyzeOrgPoliciesRequest()
+
+    # Establish that the response is the type that we expect.
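+    # AnalyzeOrgPolicies is paginated, so the client wraps the raw response
+    # in an AsyncPager rather than returning the proto directly.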
+ assert isinstance(response, pagers.AnalyzeOrgPoliciesAsyncPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.asyncio +async def test_analyze_org_policies_async_from_dict(): + await test_analyze_org_policies_async(request_type=dict) + + +def test_analyze_org_policies_field_headers(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = asset_service.AnalyzeOrgPoliciesRequest() + + request.scope = 'scope_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_org_policies), + '__call__') as call: + call.return_value = asset_service.AnalyzeOrgPoliciesResponse() + client.analyze_org_policies(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'scope=scope_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_analyze_org_policies_field_headers_async(): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = asset_service.AnalyzeOrgPoliciesRequest() + + request.scope = 'scope_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_org_policies), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.AnalyzeOrgPoliciesResponse()) + await client.analyze_org_policies(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'scope=scope_value', + ) in kw['metadata'] + + +def test_analyze_org_policies_flattened(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_org_policies), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = asset_service.AnalyzeOrgPoliciesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.analyze_org_policies( + scope='scope_value', + constraint='constraint_value', + filter='filter_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].scope + mock_val = 'scope_value' + assert arg == mock_val + arg = args[0].constraint + mock_val = 'constraint_value' + assert arg == mock_val + arg = args[0].filter + mock_val = 'filter_value' + assert arg == mock_val + + +def test_analyze_org_policies_flattened_error(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+    with pytest.raises(ValueError):
+        client.analyze_org_policies(
+            asset_service.AnalyzeOrgPoliciesRequest(),
+            scope='scope_value',
+            constraint='constraint_value',
+            filter='filter_value',
+        )
+
+@pytest.mark.asyncio
+async def test_analyze_org_policies_flattened_async():
+    client = AssetServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.analyze_org_policies),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.AnalyzeOrgPoliciesResponse())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.analyze_org_policies(
+            scope='scope_value',
+            constraint='constraint_value',
+            filter='filter_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].scope
+        mock_val = 'scope_value'
+        assert arg == mock_val
+        arg = args[0].constraint
+        mock_val = 'constraint_value'
+        assert arg == mock_val
+        arg = args[0].filter
+        mock_val = 'filter_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_analyze_org_policies_flattened_error_async():
+    client = AssetServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.analyze_org_policies(
+            asset_service.AnalyzeOrgPoliciesRequest(),
+            scope='scope_value',
+            constraint='constraint_value',
+            filter='filter_value',
+        )
+
+
+def test_analyze_org_policies_pager(transport_name: str = "grpc"):
+    client = AssetServiceClient(
+        credentials=ga_credentials.AnonymousCredentials,
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.analyze_org_policies),
+            '__call__') as call:
+        # Set the response to a series of pages.
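+        # Four pages are faked below: 3, 0, 1, and 2 results with page tokens
+        # 'abc', 'def', 'ghi', and '' (the default); the trailing RuntimeError
+        # would surface if the pager ever requested a page past the last one.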
+ call.side_effect = ( + asset_service.AnalyzeOrgPoliciesResponse( + org_policy_results=[ + asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult(), + asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult(), + asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult(), + ], + next_page_token='abc', + ), + asset_service.AnalyzeOrgPoliciesResponse( + org_policy_results=[], + next_page_token='def', + ), + asset_service.AnalyzeOrgPoliciesResponse( + org_policy_results=[ + asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult(), + ], + next_page_token='ghi', + ), + asset_service.AnalyzeOrgPoliciesResponse( + org_policy_results=[ + asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult(), + asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('scope', ''), + )), + ) + pager = client.analyze_org_policies(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult) + for i in results) +def test_analyze_org_policies_pages(transport_name: str = "grpc"): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_org_policies), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + asset_service.AnalyzeOrgPoliciesResponse( + org_policy_results=[ + asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult(), + asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult(), + asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult(), + ], + next_page_token='abc', + ), + asset_service.AnalyzeOrgPoliciesResponse( + org_policy_results=[], + next_page_token='def', + ), + asset_service.AnalyzeOrgPoliciesResponse( + org_policy_results=[ + asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult(), + ], + next_page_token='ghi', + ), + asset_service.AnalyzeOrgPoliciesResponse( + org_policy_results=[ + asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult(), + asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult(), + ], + ), + RuntimeError, + ) + pages = list(client.analyze_org_policies(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_analyze_org_policies_async_pager(): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_org_policies), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
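+        # Same four-page fixture as the sync pager test above, consumed here
+        # with `async for`.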
+ call.side_effect = ( + asset_service.AnalyzeOrgPoliciesResponse( + org_policy_results=[ + asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult(), + asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult(), + asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult(), + ], + next_page_token='abc', + ), + asset_service.AnalyzeOrgPoliciesResponse( + org_policy_results=[], + next_page_token='def', + ), + asset_service.AnalyzeOrgPoliciesResponse( + org_policy_results=[ + asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult(), + ], + next_page_token='ghi', + ), + asset_service.AnalyzeOrgPoliciesResponse( + org_policy_results=[ + asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult(), + asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult(), + ], + ), + RuntimeError, + ) + async_pager = await client.analyze_org_policies(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult) + for i in responses) + + +@pytest.mark.asyncio +async def test_analyze_org_policies_async_pages(): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_org_policies), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + asset_service.AnalyzeOrgPoliciesResponse( + org_policy_results=[ + asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult(), + asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult(), + asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult(), + ], + next_page_token='abc', + ), + asset_service.AnalyzeOrgPoliciesResponse( + org_policy_results=[], + next_page_token='def', + ), + asset_service.AnalyzeOrgPoliciesResponse( + org_policy_results=[ + asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult(), + ], + next_page_token='ghi', + ), + asset_service.AnalyzeOrgPoliciesResponse( + org_policy_results=[ + asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult(), + asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.analyze_org_policies(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + asset_service.AnalyzeOrgPolicyGovernedContainersRequest, + dict, +]) +def test_analyze_org_policy_governed_containers(request_type, transport: str = 'grpc'): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_org_policy_governed_containers), + '__call__') as call: + # Designate an appropriate return value for the call. 
+        call.return_value = asset_service.AnalyzeOrgPolicyGovernedContainersResponse(
+            next_page_token='next_page_token_value',
+        )
+        response = client.analyze_org_policy_governed_containers(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == asset_service.AnalyzeOrgPolicyGovernedContainersRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.AnalyzeOrgPolicyGovernedContainersPager)
+    assert response.next_page_token == 'next_page_token_value'
+
+
+def test_analyze_org_policy_governed_containers_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = AssetServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.analyze_org_policy_governed_containers),
+            '__call__') as call:
+        client.analyze_org_policy_governed_containers()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == asset_service.AnalyzeOrgPolicyGovernedContainersRequest()
+
+@pytest.mark.asyncio
+async def test_analyze_org_policy_governed_containers_async(transport: str = 'grpc_asyncio', request_type=asset_service.AnalyzeOrgPolicyGovernedContainersRequest):
+    client = AssetServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.analyze_org_policy_governed_containers),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.AnalyzeOrgPolicyGovernedContainersResponse(
+            next_page_token='next_page_token_value',
+        ))
+        response = await client.analyze_org_policy_governed_containers(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == asset_service.AnalyzeOrgPolicyGovernedContainersRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.AnalyzeOrgPolicyGovernedContainersAsyncPager)
+    assert response.next_page_token == 'next_page_token_value'
+
+
+@pytest.mark.asyncio
+async def test_analyze_org_policy_governed_containers_async_from_dict():
+    await test_analyze_org_policy_governed_containers_async(request_type=dict)
+
+
+def test_analyze_org_policy_governed_containers_field_headers():
+    client = AssetServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = asset_service.AnalyzeOrgPolicyGovernedContainersRequest()
+
+    request.scope = 'scope_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object( + type(client.transport.analyze_org_policy_governed_containers), + '__call__') as call: + call.return_value = asset_service.AnalyzeOrgPolicyGovernedContainersResponse() + client.analyze_org_policy_governed_containers(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'scope=scope_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_analyze_org_policy_governed_containers_field_headers_async(): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = asset_service.AnalyzeOrgPolicyGovernedContainersRequest() + + request.scope = 'scope_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_org_policy_governed_containers), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.AnalyzeOrgPolicyGovernedContainersResponse()) + await client.analyze_org_policy_governed_containers(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'scope=scope_value', + ) in kw['metadata'] + + +def test_analyze_org_policy_governed_containers_flattened(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_org_policy_governed_containers), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = asset_service.AnalyzeOrgPolicyGovernedContainersResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.analyze_org_policy_governed_containers( + scope='scope_value', + constraint='constraint_value', + filter='filter_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].scope + mock_val = 'scope_value' + assert arg == mock_val + arg = args[0].constraint + mock_val = 'constraint_value' + assert arg == mock_val + arg = args[0].filter + mock_val = 'filter_value' + assert arg == mock_val + + +def test_analyze_org_policy_governed_containers_flattened_error(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+    with pytest.raises(ValueError):
+        client.analyze_org_policy_governed_containers(
+            asset_service.AnalyzeOrgPolicyGovernedContainersRequest(),
+            scope='scope_value',
+            constraint='constraint_value',
+            filter='filter_value',
+        )
+
+@pytest.mark.asyncio
+async def test_analyze_org_policy_governed_containers_flattened_async():
+    client = AssetServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.analyze_org_policy_governed_containers),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.AnalyzeOrgPolicyGovernedContainersResponse())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.analyze_org_policy_governed_containers(
+            scope='scope_value',
+            constraint='constraint_value',
+            filter='filter_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].scope
+        mock_val = 'scope_value'
+        assert arg == mock_val
+        arg = args[0].constraint
+        mock_val = 'constraint_value'
+        assert arg == mock_val
+        arg = args[0].filter
+        mock_val = 'filter_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_analyze_org_policy_governed_containers_flattened_error_async():
+    client = AssetServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.analyze_org_policy_governed_containers(
+            asset_service.AnalyzeOrgPolicyGovernedContainersRequest(),
+            scope='scope_value',
+            constraint='constraint_value',
+            filter='filter_value',
+        )
+
+
+def test_analyze_org_policy_governed_containers_pager(transport_name: str = "grpc"):
+    client = AssetServiceClient(
+        credentials=ga_credentials.AnonymousCredentials,
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.analyze_org_policy_governed_containers),
+            '__call__') as call:
+        # Set the response to a series of pages.
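+        # The fixture below mirrors the AnalyzeOrgPolicies pager tests: four
+        # pages of GovernedContainer results guarded by a final RuntimeError.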
+ call.side_effect = ( + asset_service.AnalyzeOrgPolicyGovernedContainersResponse( + governed_containers=[ + asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer(), + asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer(), + asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer(), + ], + next_page_token='abc', + ), + asset_service.AnalyzeOrgPolicyGovernedContainersResponse( + governed_containers=[], + next_page_token='def', + ), + asset_service.AnalyzeOrgPolicyGovernedContainersResponse( + governed_containers=[ + asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer(), + ], + next_page_token='ghi', + ), + asset_service.AnalyzeOrgPolicyGovernedContainersResponse( + governed_containers=[ + asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer(), + asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('scope', ''), + )), + ) + pager = client.analyze_org_policy_governed_containers(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer) + for i in results) +def test_analyze_org_policy_governed_containers_pages(transport_name: str = "grpc"): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_org_policy_governed_containers), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + asset_service.AnalyzeOrgPolicyGovernedContainersResponse( + governed_containers=[ + asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer(), + asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer(), + asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer(), + ], + next_page_token='abc', + ), + asset_service.AnalyzeOrgPolicyGovernedContainersResponse( + governed_containers=[], + next_page_token='def', + ), + asset_service.AnalyzeOrgPolicyGovernedContainersResponse( + governed_containers=[ + asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer(), + ], + next_page_token='ghi', + ), + asset_service.AnalyzeOrgPolicyGovernedContainersResponse( + governed_containers=[ + asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer(), + asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer(), + ], + ), + RuntimeError, + ) + pages = list(client.analyze_org_policy_governed_containers(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_analyze_org_policy_governed_containers_async_pager(): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_org_policy_governed_containers), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + asset_service.AnalyzeOrgPolicyGovernedContainersResponse( + governed_containers=[ + asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer(), + asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer(), + asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer(), + ], + next_page_token='abc', + ), + asset_service.AnalyzeOrgPolicyGovernedContainersResponse( + governed_containers=[], + next_page_token='def', + ), + asset_service.AnalyzeOrgPolicyGovernedContainersResponse( + governed_containers=[ + asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer(), + ], + next_page_token='ghi', + ), + asset_service.AnalyzeOrgPolicyGovernedContainersResponse( + governed_containers=[ + asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer(), + asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer(), + ], + ), + RuntimeError, + ) + async_pager = await client.analyze_org_policy_governed_containers(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer) + for i in responses) + + +@pytest.mark.asyncio +async def test_analyze_org_policy_governed_containers_async_pages(): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_org_policy_governed_containers), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + asset_service.AnalyzeOrgPolicyGovernedContainersResponse( + governed_containers=[ + asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer(), + asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer(), + asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer(), + ], + next_page_token='abc', + ), + asset_service.AnalyzeOrgPolicyGovernedContainersResponse( + governed_containers=[], + next_page_token='def', + ), + asset_service.AnalyzeOrgPolicyGovernedContainersResponse( + governed_containers=[ + asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer(), + ], + next_page_token='ghi', + ), + asset_service.AnalyzeOrgPolicyGovernedContainersResponse( + governed_containers=[ + asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer(), + asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.analyze_org_policy_governed_containers(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + asset_service.AnalyzeOrgPolicyGovernedAssetsRequest, + dict, +]) +def test_analyze_org_policy_governed_assets(request_type, transport: str = 'grpc'): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_org_policy_governed_assets), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( + next_page_token='next_page_token_value', + ) + response = client.analyze_org_policy_governed_assets(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.AnalyzeOrgPolicyGovernedAssetsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.AnalyzeOrgPolicyGovernedAssetsPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_analyze_org_policy_governed_assets_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+    with mock.patch.object(
+            type(client.transport.analyze_org_policy_governed_assets),
+            '__call__') as call:
+        client.analyze_org_policy_governed_assets()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == asset_service.AnalyzeOrgPolicyGovernedAssetsRequest()
+
+@pytest.mark.asyncio
+async def test_analyze_org_policy_governed_assets_async(transport: str = 'grpc_asyncio', request_type=asset_service.AnalyzeOrgPolicyGovernedAssetsRequest):
+    client = AssetServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.analyze_org_policy_governed_assets),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.AnalyzeOrgPolicyGovernedAssetsResponse(
+            next_page_token='next_page_token_value',
+        ))
+        response = await client.analyze_org_policy_governed_assets(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == asset_service.AnalyzeOrgPolicyGovernedAssetsRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.AnalyzeOrgPolicyGovernedAssetsAsyncPager)
+    assert response.next_page_token == 'next_page_token_value'
+
+
+@pytest.mark.asyncio
+async def test_analyze_org_policy_governed_assets_async_from_dict():
+    await test_analyze_org_policy_governed_assets_async(request_type=dict)
+
+
+def test_analyze_org_policy_governed_assets_field_headers():
+    client = AssetServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = asset_service.AnalyzeOrgPolicyGovernedAssetsRequest()
+
+    request.scope = 'scope_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.analyze_org_policy_governed_assets),
+            '__call__') as call:
+        call.return_value = asset_service.AnalyzeOrgPolicyGovernedAssetsResponse()
+        client.analyze_org_policy_governed_assets(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'scope=scope_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_analyze_org_policy_governed_assets_field_headers_async():
+    client = AssetServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = asset_service.AnalyzeOrgPolicyGovernedAssetsRequest()
+
+    request.scope = 'scope_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.analyze_org_policy_governed_assets),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.AnalyzeOrgPolicyGovernedAssetsResponse())
+        await client.analyze_org_policy_governed_assets(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'scope=scope_value',
+    ) in kw['metadata']
+
+
+def test_analyze_org_policy_governed_assets_flattened():
+    client = AssetServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.analyze_org_policy_governed_assets),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = asset_service.AnalyzeOrgPolicyGovernedAssetsResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.analyze_org_policy_governed_assets(
+            scope='scope_value',
+            constraint='constraint_value',
+            filter='filter_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].scope
+        mock_val = 'scope_value'
+        assert arg == mock_val
+        arg = args[0].constraint
+        mock_val = 'constraint_value'
+        assert arg == mock_val
+        arg = args[0].filter
+        mock_val = 'filter_value'
+        assert arg == mock_val
+
+
+def test_analyze_org_policy_governed_assets_flattened_error():
+    client = AssetServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.analyze_org_policy_governed_assets(
+            asset_service.AnalyzeOrgPolicyGovernedAssetsRequest(),
+            scope='scope_value',
+            constraint='constraint_value',
+            filter='filter_value',
+        )
+
+@pytest.mark.asyncio
+async def test_analyze_org_policy_governed_assets_flattened_async():
+    client = AssetServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.analyze_org_policy_governed_assets),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.AnalyzeOrgPolicyGovernedAssetsResponse())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.analyze_org_policy_governed_assets(
+            scope='scope_value',
+            constraint='constraint_value',
+            filter='filter_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].scope + mock_val = 'scope_value' + assert arg == mock_val + arg = args[0].constraint + mock_val = 'constraint_value' + assert arg == mock_val + arg = args[0].filter + mock_val = 'filter_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_analyze_org_policy_governed_assets_flattened_error_async(): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.analyze_org_policy_governed_assets( + asset_service.AnalyzeOrgPolicyGovernedAssetsRequest(), + scope='scope_value', + constraint='constraint_value', + filter='filter_value', + ) + + +def test_analyze_org_policy_governed_assets_pager(transport_name: str = "grpc"): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_org_policy_governed_assets), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( + governed_assets=[ + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset(), + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset(), + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset(), + ], + next_page_token='abc', + ), + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( + governed_assets=[], + next_page_token='def', + ), + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( + governed_assets=[ + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset(), + ], + next_page_token='ghi', + ), + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( + governed_assets=[ + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset(), + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('scope', ''), + )), + ) + pager = client.analyze_org_policy_governed_assets(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset) + for i in results) +def test_analyze_org_policy_governed_assets_pages(transport_name: str = "grpc"): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_org_policy_governed_assets), + '__call__') as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( + governed_assets=[ + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset(), + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset(), + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset(), + ], + next_page_token='abc', + ), + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( + governed_assets=[], + next_page_token='def', + ), + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( + governed_assets=[ + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset(), + ], + next_page_token='ghi', + ), + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( + governed_assets=[ + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset(), + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset(), + ], + ), + RuntimeError, + ) + pages = list(client.analyze_org_policy_governed_assets(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_analyze_org_policy_governed_assets_async_pager(): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_org_policy_governed_assets), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( + governed_assets=[ + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset(), + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset(), + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset(), + ], + next_page_token='abc', + ), + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( + governed_assets=[], + next_page_token='def', + ), + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( + governed_assets=[ + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset(), + ], + next_page_token='ghi', + ), + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( + governed_assets=[ + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset(), + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset(), + ], + ), + RuntimeError, + ) + async_pager = await client.analyze_org_policy_governed_assets(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset) + for i in responses) + + +@pytest.mark.asyncio +async def test_analyze_org_policy_governed_assets_async_pages(): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_org_policy_governed_assets), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( + governed_assets=[ + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset(), + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset(), + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset(), + ], + next_page_token='abc', + ), + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( + governed_assets=[], + next_page_token='def', + ), + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( + governed_assets=[ + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset(), + ], + next_page_token='ghi', + ), + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( + governed_assets=[ + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset(), + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.analyze_org_policy_governed_assets(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + @pytest.mark.parametrize("request_type", [ asset_service.ExportAssetsRequest, dict, ]) -def test_export_assets_rest(request_type): +def test_export_assets_rest(request_type): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'sample1/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.export_assets(request) + + # Establish that the response is the type that we expect. 
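+    # export_assets is a long-running operation; the REST stub returns an
+    # operations_pb2.Operation, so only the operation name is checked here.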
+ assert response.operation.name == "operations/spam" + + +def test_export_assets_rest_required_fields(request_type=asset_service.ExportAssetsRequest): + transport_class = transports.AssetServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).export_assets._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).export_assets._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
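
# The required-fields round trip above in miniature: serializing with
# including_default_value_fields=False drops "parent" while it is still "",
# and _get_unset_required_fields is what reports it so the test can restore
# a non-default value. A dict-based stand-in (hypothetical helper):
def get_unset_required_fields(required, jsonified):
    return {field for field in required if field not in jsonified}

jsonified_request = {}  # "parent" was "" and got dropped from the JSON
assert get_unset_required_fields({"parent"}, jsonified_request) == {"parent"}

jsonified_request["parent"] = "parent_value"
assert get_unset_required_fields({"parent"}, jsonified_request) == set()
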
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.export_assets(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_export_assets_rest_unset_required_fields(): + transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.export_assets._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent", "outputConfig", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_export_assets_rest_interceptors(null_interceptor): + transport = transports.AssetServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), + ) + client = AssetServiceClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.AssetServiceRestInterceptor, "post_export_assets") as post, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "pre_export_assets") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = asset_service.ExportAssetsRequest.pb(asset_service.ExportAssetsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) + + request = asset_service.ExportAssetsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.export_assets(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_export_assets_rest_bad_request(transport: str = 'rest', request_type=asset_service.ExportAssetsRequest): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'sample1/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
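
# On the assertion idiom in test_export_assets_rest_unset_required_fields:
# the left operand (fields that may travel as query params) is empty for
# export_assets, so the intersection collapses and the test is effectively
# asserting that no required fields remain unset:
assert (set(()) & set(("parent", "outputConfig"))) == set()
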
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.export_assets(request) + + +def test_export_assets_rest_error(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + asset_service.ListAssetsRequest, + dict, +]) +def test_list_assets_rest(request_type): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'sample1/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = asset_service.ListAssetsResponse( + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = asset_service.ListAssetsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list_assets(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListAssetsPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_assets_rest_required_fields(request_type=asset_service.ListAssetsRequest): + transport_class = transports.AssetServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_assets._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_assets._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("asset_types", "content_type", "page_size", "page_token", "read_time", "relationship_types", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = asset_service.ListAssetsResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = asset_service.ListAssetsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list_assets(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_assets_rest_unset_required_fields(): + transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list_assets._get_unset_required_fields({}) + assert set(unset_fields) == (set(("assetTypes", "contentType", "pageSize", "pageToken", "readTime", "relationshipTypes", )) & set(("parent", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_assets_rest_interceptors(null_interceptor): + transport = transports.AssetServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), + ) + client = AssetServiceClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "post_list_assets") as post, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "pre_list_assets") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = asset_service.ListAssetsRequest.pb(asset_service.ListAssetsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = asset_service.ListAssetsResponse.to_json(asset_service.ListAssetsResponse()) + + request = asset_service.ListAssetsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = asset_service.ListAssetsResponse() + + client.list_assets(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_assets_rest_bad_request(transport: str = 'rest', request_type=asset_service.ListAssetsRequest): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'sample1/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
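
# The pre/post contract the *_rest_interceptors tests pin down, shown as a
# minimal hand-written interceptor (a hypothetical class, not the generated
# AssetServiceRestInterceptor): pre_* may rewrite the request and metadata
# before transcoding, and post_* may rewrite the deserialized response.
class TracingInterceptor:
    def pre_list_assets(self, request, metadata):
        return request, list(metadata) + [("traced", "true")]

    def post_list_assets(self, response):
        return response  # pass the payload through unchanged

request, metadata = TracingInterceptor().pre_list_assets(
    object(), [("key", "val")])
assert ("traced", "true") in metadata
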
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_assets(request) + + +def test_list_assets_rest_flattened(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = asset_service.ListAssetsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'sample1/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = asset_service.ListAssetsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list_assets(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=*/*}/assets" % client.transport._host, args[1]) + + +def test_list_assets_rest_flattened_error(transport: str = 'rest'): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_assets( + asset_service.ListAssetsRequest(), + parent='parent_value', + ) + + +def test_list_assets_rest_pager(transport: str = 'rest'): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + asset_service.ListAssetsResponse( + assets=[ + assets.Asset(), + assets.Asset(), + assets.Asset(), + ], + next_page_token='abc', + ), + asset_service.ListAssetsResponse( + assets=[], + next_page_token='def', + ), + asset_service.ListAssetsResponse( + assets=[ + assets.Asset(), + ], + next_page_token='ghi', + ), + asset_service.ListAssetsResponse( + assets=[ + assets.Asset(), + assets.Asset(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(asset_service.ListAssetsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'parent': 'sample1/sample2'} + + pager = client.list_assets(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, assets.Asset) + for i in results) + + pages = list(client.list_assets(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + asset_service.BatchGetAssetsHistoryRequest, + dict, +]) +def test_batch_get_assets_history_rest(request_type): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'sample1/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = asset_service.BatchGetAssetsHistoryResponse( + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = asset_service.BatchGetAssetsHistoryResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.batch_get_assets_history(request) + + # Establish that the response is the type that we expect. 
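
# The REST pager fixture above, reduced to its moving parts: each canned
# page is serialized to JSON, wrapped in a requests.Response, and queued as
# a side effect; the tuple is doubled because the test drains the pager
# twice, once for items and once for pages. Field names are illustrative.
import json
import requests

canned_pages = [
    {"assets": ["a", "b", "c"], "next_page_token": "abc"},
    {"assets": ["d"], "next_page_token": ""},
]
canned_pages = canned_pages + canned_pages  # two full passes

responses = []
for page in canned_pages:
    http_response = requests.Response()
    http_response.status_code = 200
    http_response._content = json.dumps(page).encode("UTF-8")
    responses.append(http_response)

assert responses[0].json()["next_page_token"] == "abc"
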
+ assert isinstance(response, asset_service.BatchGetAssetsHistoryResponse) + + +def test_batch_get_assets_history_rest_required_fields(request_type=asset_service.BatchGetAssetsHistoryRequest): + transport_class = transports.AssetServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).batch_get_assets_history._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).batch_get_assets_history._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("asset_names", "content_type", "read_time_window", "relationship_types", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = asset_service.BatchGetAssetsHistoryResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = asset_service.BatchGetAssetsHistoryResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.batch_get_assets_history(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_batch_get_assets_history_rest_unset_required_fields(): + transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.batch_get_assets_history._get_unset_required_fields({}) + assert set(unset_fields) == (set(("assetNames", "contentType", "readTimeWindow", "relationshipTypes", )) & set(("parent", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_batch_get_assets_history_rest_interceptors(null_interceptor): + transport = transports.AssetServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), + ) + client = AssetServiceClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "post_batch_get_assets_history") as post, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "pre_batch_get_assets_history") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = asset_service.BatchGetAssetsHistoryRequest.pb(asset_service.BatchGetAssetsHistoryRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = asset_service.BatchGetAssetsHistoryResponse.to_json(asset_service.BatchGetAssetsHistoryResponse()) + + request = asset_service.BatchGetAssetsHistoryRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = asset_service.BatchGetAssetsHistoryResponse() + + client.batch_get_assets_history(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_batch_get_assets_history_rest_bad_request(transport: str = 'rest', request_type=asset_service.BatchGetAssetsHistoryRequest): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'sample1/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
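
# What the *_rest_bad_request tests lean on: google-api-core maps HTTP
# status codes to exception classes, so a mocked 400 surfaces as
# core_exceptions.BadRequest. The mapping entry point can be exercised
# directly (assuming google-api-core and requests are installed):
from google.api_core import exceptions as core_exceptions
import requests

http_response = requests.Response()
http_response.status_code = 400
http_response.request = requests.Request()  # the tests attach one as well
http_response._content = b"{}"

exc = core_exceptions.from_http_response(http_response)
assert isinstance(exc, core_exceptions.BadRequest)
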
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.batch_get_assets_history(request) + + +def test_batch_get_assets_history_rest_error(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + asset_service.CreateFeedRequest, + dict, +]) +def test_create_feed_rest(request_type): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'sample1/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = asset_service.Feed( + name='name_value', + asset_names=['asset_names_value'], + asset_types=['asset_types_value'], + content_type=asset_service.ContentType.RESOURCE, + relationship_types=['relationship_types_value'], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = asset_service.Feed.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.create_feed(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, asset_service.Feed) + assert response.name == 'name_value' + assert response.asset_names == ['asset_names_value'] + assert response.asset_types == ['asset_types_value'] + assert response.content_type == asset_service.ContentType.RESOURCE + assert response.relationship_types == ['relationship_types_value'] + + +def test_create_feed_rest_required_fields(request_type=asset_service.CreateFeedRequest): + transport_class = transports.AssetServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["feed_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_feed._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + jsonified_request["feedId"] = 'feed_id_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_feed._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + assert "feedId" in jsonified_request + assert jsonified_request["feedId"] == 'feed_id_value' + + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # 
Designate an appropriate value for the returned response. + return_value = asset_service.Feed() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = asset_service.Feed.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.create_feed(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_create_feed_rest_unset_required_fields(): + transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.create_feed._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent", "feedId", "feed", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_feed_rest_interceptors(null_interceptor): + transport = transports.AssetServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), + ) + client = AssetServiceClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "post_create_feed") as post, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "pre_create_feed") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = asset_service.CreateFeedRequest.pb(asset_service.CreateFeedRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = asset_service.Feed.to_json(asset_service.Feed()) + + request = asset_service.CreateFeedRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = asset_service.Feed() + + client.create_feed(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_feed_rest_bad_request(transport: str = 'rest', request_type=asset_service.CreateFeedRequest): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'sample1/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
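
# Why test_create_feed_rest_required_fields sets request_init["feed_id"] but
# asserts on jsonified_request["feedId"]: proto3 JSON serialization
# (json_format.MessageToJson) emits lowerCamelCase field names. A tiny
# converter mirroring that rule:
def to_json_name(snake: str) -> str:
    head, *rest = snake.split("_")
    return head + "".join(part.capitalize() for part in rest)

assert to_json_name("feed_id") == "feedId"
assert to_json_name("relationship_types") == "relationshipTypes"
assert to_json_name("parent") == "parent"  # single words pass through
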
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_feed(request) + + +def test_create_feed_rest_flattened(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = asset_service.Feed() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'sample1/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = asset_service.Feed.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.create_feed(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=*/*}/feeds" % client.transport._host, args[1]) + + +def test_create_feed_rest_flattened_error(transport: str = 'rest'): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_feed( + asset_service.CreateFeedRequest(), + parent='parent_value', + ) + + +def test_create_feed_rest_error(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + asset_service.GetFeedRequest, + dict, +]) +def test_get_feed_rest(request_type): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'sample1/sample2/feeds/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = asset_service.Feed( + name='name_value', + asset_names=['asset_names_value'], + asset_types=['asset_types_value'], + content_type=asset_service.ContentType.RESOURCE, + relationship_types=['relationship_types_value'], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = asset_service.Feed.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_feed(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, asset_service.Feed) + assert response.name == 'name_value' + assert response.asset_names == ['asset_names_value'] + assert response.asset_types == ['asset_types_value'] + assert response.content_type == asset_service.ContentType.RESOURCE + assert response.relationship_types == ['relationship_types_value'] + + +def test_get_feed_rest_required_fields(request_type=asset_service.GetFeedRequest): + transport_class = transports.AssetServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_feed._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_feed._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = asset_service.Feed() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
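
# Why path_template.transcode is mocked in these required-fields tests: the
# real transcoder matches the request against the method's http rule and
# rejects placeholder defaults in path fields, so each test pins a canned
# result carrying just the keys the REST transport reads (shape assumed
# from the tests above; 'body' appears only for verbs that carry one):
transcode_result = {
    "uri": "v1/sample_method",
    "method": "get",
    "query_params": {},  # the pb request in the generated tests
}
assert {"uri", "method", "query_params"} <= transcode_result.keys()
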
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = asset_service.Feed.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get_feed(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_feed_rest_unset_required_fields(): + transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_feed._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_feed_rest_interceptors(null_interceptor): + transport = transports.AssetServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), + ) + client = AssetServiceClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "post_get_feed") as post, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "pre_get_feed") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = asset_service.GetFeedRequest.pb(asset_service.GetFeedRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = asset_service.Feed.to_json(asset_service.Feed()) + + request = asset_service.GetFeedRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = asset_service.Feed() + + client.get_feed(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_feed_rest_bad_request(transport: str = 'rest', request_type=asset_service.GetFeedRequest): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'sample1/sample2/feeds/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_feed(request) + + +def test_get_feed_rest_flattened(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = asset_service.Feed() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'sample1/sample2/feeds/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = asset_service.Feed.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get_feed(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=*/*/feeds/*}" % client.transport._host, args[1]) + + +def test_get_feed_rest_flattened_error(transport: str = 'rest'): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_feed( + asset_service.GetFeedRequest(), + name='name_value', + ) + + +def test_get_feed_rest_error(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + asset_service.ListFeedsRequest, + dict, +]) +def test_list_feeds_rest(request_type): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'sample1/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = asset_service.ListFeedsResponse( + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = asset_service.ListFeedsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list_feeds(request) + + # Establish that the response is the type that we expect. 
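
# The rule test_get_feed_rest_flattened_error enforces, reduced to a
# stand-alone sketch (hypothetical signature): a method accepts either a
# request object or flattened fields, never both.
def get_feed(request=None, *, name=None):
    if request is not None and name is not None:
        raise ValueError("request object and flattened fields are mutually exclusive")
    return request if request is not None else {"name": name}

assert get_feed(name="name_value") == {"name": "name_value"}
try:
    get_feed({"name": "a"}, name="b")
    raise AssertionError("expected ValueError")
except ValueError:
    pass
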
+ assert isinstance(response, asset_service.ListFeedsResponse) + + +def test_list_feeds_rest_required_fields(request_type=asset_service.ListFeedsRequest): + transport_class = transports.AssetServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_feeds._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_feeds._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = asset_service.ListFeedsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = asset_service.ListFeedsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list_feeds(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_feeds_rest_unset_required_fields(): + transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list_feeds._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_feeds_rest_interceptors(null_interceptor): + transport = transports.AssetServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), + ) + client = AssetServiceClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "post_list_feeds") as post, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "pre_list_feeds") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = asset_service.ListFeedsRequest.pb(asset_service.ListFeedsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = asset_service.ListFeedsResponse.to_json(asset_service.ListFeedsResponse()) + + request = asset_service.ListFeedsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = asset_service.ListFeedsResponse() + + client.list_feeds(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_feeds_rest_bad_request(transport: str = 'rest', request_type=asset_service.ListFeedsRequest): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'sample1/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_feeds(request) + + +def test_list_feeds_rest_flattened(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = asset_service.ListFeedsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'sample1/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = asset_service.ListFeedsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list_feeds(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=*/*}/feeds" % client.transport._host, args[1]) + + +def test_list_feeds_rest_flattened_error(transport: str = 'rest'): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_feeds( + asset_service.ListFeedsRequest(), + parent='parent_value', + ) + + +def test_list_feeds_rest_error(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + asset_service.UpdateFeedRequest, + dict, +]) +def test_update_feed_rest(request_type): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'feed': {'name': 'sample1/sample2/feeds/sample3'}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = asset_service.Feed( + name='name_value', + asset_names=['asset_names_value'], + asset_types=['asset_types_value'], + content_type=asset_service.ContentType.RESOURCE, + relationship_types=['relationship_types_value'], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = asset_service.Feed.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.update_feed(request) + + # Establish that the response is the type that we expect. 
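
# The flattened tests assert the final URL against the method's http rule
# via google.api_core.path_template.validate, which expands the template
# into a regex; e.g. {parent=*/*} requires exactly two path segments:
from google.api_core import path_template

assert path_template.validate("v1/{parent=*/*}/feeds", "v1/sample1/sample2/feeds")
assert not path_template.validate("v1/{parent=*/*}/feeds", "v1/sample1/feeds")
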
+ assert isinstance(response, asset_service.Feed) + assert response.name == 'name_value' + assert response.asset_names == ['asset_names_value'] + assert response.asset_types == ['asset_types_value'] + assert response.content_type == asset_service.ContentType.RESOURCE + assert response.relationship_types == ['relationship_types_value'] + + +def test_update_feed_rest_required_fields(request_type=asset_service.UpdateFeedRequest): + transport_class = transports.AssetServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_feed._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_feed._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = asset_service.Feed() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = asset_service.Feed.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.update_feed(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_update_feed_rest_unset_required_fields(): + transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.update_feed._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("feed", "updateMask", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_feed_rest_interceptors(null_interceptor): + transport = transports.AssetServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), + ) + client = AssetServiceClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "post_update_feed") as post, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "pre_update_feed") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = asset_service.UpdateFeedRequest.pb(asset_service.UpdateFeedRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = asset_service.Feed.to_json(asset_service.Feed()) + + request = asset_service.UpdateFeedRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = asset_service.Feed() + + client.update_feed(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_feed_rest_bad_request(transport: str = 'rest', request_type=asset_service.UpdateFeedRequest): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'feed': {'name': 'sample1/sample2/feeds/sample3'}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_feed(request) + + +def test_update_feed_rest_flattened(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = asset_service.Feed() + + # get arguments that satisfy an http rule for this method + sample_request = {'feed': {'name': 'sample1/sample2/feeds/sample3'}} + + # get truthy value for each flattened field + mock_args = dict( + feed=asset_service.Feed(name='name_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = asset_service.Feed.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.update_feed(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{feed.name=*/*/feeds/*}" % client.transport._host, args[1]) + + +def test_update_feed_rest_flattened_error(transport: str = 'rest'): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_feed( + asset_service.UpdateFeedRequest(), + feed=asset_service.Feed(name='name_value'), + ) + + +def test_update_feed_rest_error(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + asset_service.DeleteFeedRequest, + dict, +]) +def test_delete_feed_rest(request_type): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'sample1/sample2/feeds/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete_feed(request) + + # Establish that the response is the type that we expect. 
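
# DeleteFeed returns google.protobuf.Empty, so the generated client yields
# None and the mocked HTTP body above is the empty string. A minimal sketch
# of that contract (hypothetical wrapper):
def delete_feed(http_body: bytes):
    # nothing to deserialize; Empty carries no fields
    assert http_body == b''
    return None

assert delete_feed(b'') is None
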
+ assert response is None + + +def test_delete_feed_rest_required_fields(request_type=asset_service.DeleteFeedRequest): + transport_class = transports.AssetServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_feed._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_feed._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
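+                # The stub below mimics path_template.transcode()'s contract:
+                # a dict with 'uri', 'method', and 'query_params' (plus an
+                # optional 'body'). DELETE carries no body, so every request
+                # field has to travel in the query string.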
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_feed(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_feed_rest_unset_required_fields(): + transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete_feed._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_feed_rest_interceptors(null_interceptor): + transport = transports.AssetServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), + ) + client = AssetServiceClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "pre_delete_feed") as pre: + pre.assert_not_called() + pb_message = asset_service.DeleteFeedRequest.pb(asset_service.DeleteFeedRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = asset_service.DeleteFeedRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_feed(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + + +def test_delete_feed_rest_bad_request(transport: str = 'rest', request_type=asset_service.DeleteFeedRequest): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'sample1/sample2/feeds/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_feed(request) + + +def test_delete_feed_rest_flattened(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
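+        # As in test_delete_feed_rest above, a successful delete carries no
+        # payload, so the mocked 200 response below gets an empty JSON body.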
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'sample1/sample2/feeds/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_feed(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=*/*/feeds/*}" % client.transport._host, args[1]) + + +def test_delete_feed_rest_flattened_error(transport: str = 'rest'): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_feed( + asset_service.DeleteFeedRequest(), + name='name_value', + ) + + +def test_delete_feed_rest_error(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + asset_service.SearchAllResourcesRequest, + dict, +]) +def test_search_all_resources_rest(request_type): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'scope': 'sample1/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = asset_service.SearchAllResourcesResponse( + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = asset_service.SearchAllResourcesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.search_all_resources(request) + + # Establish that the response is the type that we expect. 
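+    # The raw SearchAllResourcesResponse comes back wrapped in a pager:
+    # attribute access (e.g. next_page_token) is proxied to the first page,
+    # while iteration would fetch further pages whenever a next_page_token is
+    # set, along the lines of
+    #
+    #     for resource in client.search_all_resources(request=request):
+    #         ...  # assets.ResourceSearchResult items across all pages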
+ assert isinstance(response, pagers.SearchAllResourcesPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_search_all_resources_rest_required_fields(request_type=asset_service.SearchAllResourcesRequest): + transport_class = transports.AssetServiceRestTransport + + request_init = {} + request_init["scope"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).search_all_resources._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["scope"] = 'scope_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).search_all_resources._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("asset_types", "order_by", "page_size", "page_token", "query", "read_mask", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "scope" in jsonified_request + assert jsonified_request["scope"] == 'scope_value' + + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = asset_service.SearchAllResourcesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
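+                # Unlike the update_feed case above, this stub returns no
+                # 'body' key at all: searchAllResources transcodes to a GET,
+                # so the whole request message rides in 'query_params'.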
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = asset_service.SearchAllResourcesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.search_all_resources(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_search_all_resources_rest_unset_required_fields(): + transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.search_all_resources._get_unset_required_fields({}) + assert set(unset_fields) == (set(("assetTypes", "orderBy", "pageSize", "pageToken", "query", "readMask", )) & set(("scope", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_search_all_resources_rest_interceptors(null_interceptor): + transport = transports.AssetServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), + ) + client = AssetServiceClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "post_search_all_resources") as post, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "pre_search_all_resources") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = asset_service.SearchAllResourcesRequest.pb(asset_service.SearchAllResourcesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = asset_service.SearchAllResourcesResponse.to_json(asset_service.SearchAllResourcesResponse()) + + request = asset_service.SearchAllResourcesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = asset_service.SearchAllResourcesResponse() + + client.search_all_resources(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_search_all_resources_rest_bad_request(transport: str = 'rest', request_type=asset_service.SearchAllResourcesRequest): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'scope': 'sample1/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
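+    # The REST transport turns any non-2xx status from the mocked session
+    # into the matching google.api_core exception (roughly via
+    # core_exceptions.from_http_response), so a canned 400 should surface
+    # here as core_exceptions.BadRequest.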
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.search_all_resources(request) + + +def test_search_all_resources_rest_flattened(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = asset_service.SearchAllResourcesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'scope': 'sample1/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + scope='scope_value', + query='query_value', + asset_types=['asset_types_value'], + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = asset_service.SearchAllResourcesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.search_all_resources(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{scope=*/*}:searchAllResources" % client.transport._host, args[1]) + + +def test_search_all_resources_rest_flattened_error(transport: str = 'rest'): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.search_all_resources( + asset_service.SearchAllResourcesRequest(), + scope='scope_value', + query='query_value', + asset_types=['asset_types_value'], + ) + + +def test_search_all_resources_rest_pager(transport: str = 'rest'): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
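+        # Pager mechanics in this test: req.side_effect hands out one canned
+        # Response per HTTP round trip, so iteration keeps requesting pages
+        # until one arrives without next_page_token. The pages hold 3 + 0 +
+        # 1 + 2 results, and the sequence is doubled because the pager is
+        # consumed twice (once via list(pager), once via .pages).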
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + asset_service.SearchAllResourcesResponse( + results=[ + assets.ResourceSearchResult(), + assets.ResourceSearchResult(), + assets.ResourceSearchResult(), + ], + next_page_token='abc', + ), + asset_service.SearchAllResourcesResponse( + results=[], + next_page_token='def', + ), + asset_service.SearchAllResourcesResponse( + results=[ + assets.ResourceSearchResult(), + ], + next_page_token='ghi', + ), + asset_service.SearchAllResourcesResponse( + results=[ + assets.ResourceSearchResult(), + assets.ResourceSearchResult(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(asset_service.SearchAllResourcesResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'scope': 'sample1/sample2'} + + pager = client.search_all_resources(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, assets.ResourceSearchResult) + for i in results) + + pages = list(client.search_all_resources(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + asset_service.SearchAllIamPoliciesRequest, + dict, +]) +def test_search_all_iam_policies_rest(request_type): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'scope': 'sample1/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = asset_service.SearchAllIamPoliciesResponse( + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = asset_service.SearchAllIamPoliciesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.search_all_iam_policies(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.SearchAllIamPoliciesPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_search_all_iam_policies_rest_required_fields(request_type=asset_service.SearchAllIamPoliciesRequest): + transport_class = transports.AssetServiceRestTransport + + request_init = {} + request_init["scope"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).search_all_iam_policies._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["scope"] = 'scope_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).search_all_iam_policies._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("asset_types", "order_by", "page_size", "page_token", "query", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "scope" in jsonified_request + assert jsonified_request["scope"] == 'scope_value' + + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = asset_service.SearchAllIamPoliciesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = asset_service.SearchAllIamPoliciesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.search_all_iam_policies(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_search_all_iam_policies_rest_unset_required_fields(): + transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.search_all_iam_policies._get_unset_required_fields({}) + assert set(unset_fields) == (set(("assetTypes", "orderBy", "pageSize", "pageToken", "query", )) & set(("scope", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_search_all_iam_policies_rest_interceptors(null_interceptor): + transport = transports.AssetServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), + ) + client = AssetServiceClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "post_search_all_iam_policies") as post, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "pre_search_all_iam_policies") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = asset_service.SearchAllIamPoliciesRequest.pb(asset_service.SearchAllIamPoliciesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = asset_service.SearchAllIamPoliciesResponse.to_json(asset_service.SearchAllIamPoliciesResponse()) + + request = asset_service.SearchAllIamPoliciesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = asset_service.SearchAllIamPoliciesResponse() + + client.search_all_iam_policies(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_search_all_iam_policies_rest_bad_request(transport: str = 'rest', request_type=asset_service.SearchAllIamPoliciesRequest): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'scope': 'sample1/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.search_all_iam_policies(request) + + +def test_search_all_iam_policies_rest_flattened(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = asset_service.SearchAllIamPoliciesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'scope': 'sample1/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + scope='scope_value', + query='query_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = asset_service.SearchAllIamPoliciesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.search_all_iam_policies(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{scope=*/*}:searchAllIamPolicies" % client.transport._host, args[1]) + + +def test_search_all_iam_policies_rest_flattened_error(transport: str = 'rest'): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.search_all_iam_policies( + asset_service.SearchAllIamPoliciesRequest(), + scope='scope_value', + query='query_value', + ) + + +def test_search_all_iam_policies_rest_pager(transport: str = 'rest'): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + asset_service.SearchAllIamPoliciesResponse( + results=[ + assets.IamPolicySearchResult(), + assets.IamPolicySearchResult(), + assets.IamPolicySearchResult(), + ], + next_page_token='abc', + ), + asset_service.SearchAllIamPoliciesResponse( + results=[], + next_page_token='def', + ), + asset_service.SearchAllIamPoliciesResponse( + results=[ + assets.IamPolicySearchResult(), + ], + next_page_token='ghi', + ), + asset_service.SearchAllIamPoliciesResponse( + results=[ + assets.IamPolicySearchResult(), + assets.IamPolicySearchResult(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(asset_service.SearchAllIamPoliciesResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'scope': 'sample1/sample2'} + + pager = client.search_all_iam_policies(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, assets.IamPolicySearchResult) + for i in results) + + pages = list(client.search_all_iam_policies(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + asset_service.AnalyzeIamPolicyRequest, + dict, +]) +def test_analyze_iam_policy_rest(request_type): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2'} + request_init = {'analysis_query': {'scope': 'sample1/sample2'}} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), 'request') as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') + return_value = asset_service.AnalyzeIamPolicyResponse( + fully_explored=True, + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) + pb_return_value = asset_service.AnalyzeIamPolicyResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value - response = client.export_assets(request) + response = client.analyze_iam_policy(request) # Establish that the response is the type that we expect. 
- assert response.operation.name == "operations/spam" + assert isinstance(response, asset_service.AnalyzeIamPolicyResponse) + assert response.fully_explored is True -def test_export_assets_rest_required_fields(request_type=asset_service.ExportAssetsRequest): +def test_analyze_iam_policy_rest_required_fields(request_type=asset_service.AnalyzeIamPolicyRequest): transport_class = transports.AssetServiceRestTransport request_init = {} - request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads(json_format.MessageToJson( @@ -3672,19 +9279,17 @@ def test_export_assets_rest_required_fields(request_type=asset_service.ExportAss # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).export_assets._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_iam_policy._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).export_assets._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_iam_policy._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("analysis_query", "execution_timeout", "saved_analysis_query", )) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3693,7 +9298,7 @@ def test_export_assets_rest_required_fields(request_type=asset_service.ExportAss request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') + return_value = asset_service.AnalyzeIamPolicyResponse() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, 'request') as req: # We need to mock transcode() because providing default values @@ -3705,20 +9310,21 @@ def test_export_assets_rest_required_fields(request_type=asset_service.ExportAss pb_request = request_type.pb(request) transcode_result = { 'uri': 'v1/sample_method', - 'method': "post", + 'method': "get", 'query_params': pb_request, } - transcode_result['body'] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) + + pb_return_value = asset_service.AnalyzeIamPolicyResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value - response = client.export_assets(request) + response = client.analyze_iam_policy(request) expected_params = [ ] @@ -3726,15 +9332,15 @@ def test_export_assets_rest_required_fields(request_type=asset_service.ExportAss assert expected_params == actual_params -def test_export_assets_rest_unset_required_fields(): +def test_analyze_iam_policy_rest_unset_required_fields(): transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - unset_fields = transport.export_assets._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("parent", "outputConfig", ))) + unset_fields = transport.analyze_iam_policy._get_unset_required_fields({}) + assert set(unset_fields) == (set(("analysisQuery", "executionTimeout", "savedAnalysisQuery", )) & set(("analysisQuery", ))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_export_assets_rest_interceptors(null_interceptor): +def test_analyze_iam_policy_rest_interceptors(null_interceptor): transport = transports.AssetServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), @@ -3742,12 +9348,11 @@ def test_export_assets_rest_interceptors(null_interceptor): client = AssetServiceClient(transport=transport) with mock.patch.object(type(client.transport._session), "request") as req, \ mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_export_assets") as post, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_export_assets") as pre: + mock.patch.object(transports.AssetServiceRestInterceptor, "post_analyze_iam_policy") as post, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "pre_analyze_iam_policy") as pre: pre.assert_not_called() post.assert_not_called() - pb_message = asset_service.ExportAssetsRequest.pb(asset_service.ExportAssetsRequest()) + pb_message = asset_service.AnalyzeIamPolicyRequest.pb(asset_service.AnalyzeIamPolicyRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -3758,30 +9363,30 @@ def test_export_assets_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value._content = asset_service.AnalyzeIamPolicyResponse.to_json(asset_service.AnalyzeIamPolicyResponse()) - request = asset_service.ExportAssetsRequest() + request = asset_service.AnalyzeIamPolicyRequest() metadata =[ ("key", "val"), 
("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = asset_service.AnalyzeIamPolicyResponse() - client.export_assets(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.analyze_iam_policy(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) pre.assert_called_once() post.assert_called_once() -def test_export_assets_rest_bad_request(transport: str = 'rest', request_type=asset_service.ExportAssetsRequest): +def test_analyze_iam_policy_rest_bad_request(transport: str = 'rest', request_type=asset_service.AnalyzeIamPolicyRequest): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2'} + request_init = {'analysis_query': {'scope': 'sample1/sample2'}} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -3791,10 +9396,10 @@ def test_export_assets_rest_bad_request(transport: str = 'rest', request_type=as response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.export_assets(request) + client.analyze_iam_policy(request) -def test_export_assets_rest_error(): +def test_analyze_iam_policy_rest_error(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport='rest' @@ -3802,46 +9407,41 @@ def test_export_assets_rest_error(): @pytest.mark.parametrize("request_type", [ - asset_service.ListAssetsRequest, + asset_service.AnalyzeIamPolicyLongrunningRequest, dict, ]) -def test_list_assets_rest(request_type): +def test_analyze_iam_policy_longrunning_rest(request_type): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2'} + request_init = {'analysis_query': {'scope': 'sample1/sample2'}} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), 'request') as req: # Designate an appropriate value for the returned response. - return_value = asset_service.ListAssetsResponse( - next_page_token='next_page_token_value', - ) + return_value = operations_pb2.Operation(name='operations/spam') # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = asset_service.ListAssetsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value - response = client.list_assets(request) + response = client.analyze_iam_policy_longrunning(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListAssetsPager) - assert response.next_page_token == 'next_page_token_value' + assert response.operation.name == "operations/spam" -def test_list_assets_rest_required_fields(request_type=asset_service.ListAssetsRequest): +def test_analyze_iam_policy_longrunning_rest_required_fields(request_type=asset_service.AnalyzeIamPolicyLongrunningRequest): transport_class = transports.AssetServiceRestTransport request_init = {} - request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads(json_format.MessageToJson( @@ -3852,21 +9452,15 @@ def test_list_assets_rest_required_fields(request_type=asset_service.ListAssetsR # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_assets._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_iam_policy_longrunning._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_assets._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("asset_types", "content_type", "page_size", "page_token", "read_time", )) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_iam_policy_longrunning._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3875,7 +9469,7 @@ def test_list_assets_rest_required_fields(request_type=asset_service.ListAssetsR request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = asset_service.ListAssetsResponse() + return_value = operations_pb2.Operation(name='operations/spam') # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, 'request') as req: # We need to mock transcode() because providing default values @@ -3887,21 +9481,20 @@ def test_list_assets_rest_required_fields(request_type=asset_service.ListAssetsR pb_request = request_type.pb(request) transcode_result = { 'uri': 'v1/sample_method', - 'method': "get", + 'method': "post", 'query_params': pb_request, } + transcode_result['body'] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - pb_return_value = asset_service.ListAssetsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value - response = client.list_assets(request) + response = client.analyze_iam_policy_longrunning(request) expected_params = [ ] @@ -3909,15 +9502,15 @@ def test_list_assets_rest_required_fields(request_type=asset_service.ListAssetsR assert expected_params == actual_params -def test_list_assets_rest_unset_required_fields(): +def test_analyze_iam_policy_longrunning_rest_unset_required_fields(): transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - unset_fields = transport.list_assets._get_unset_required_fields({}) - assert set(unset_fields) == (set(("assetTypes", "contentType", "pageSize", "pageToken", "readTime", )) & set(("parent", ))) + unset_fields = transport.analyze_iam_policy_longrunning._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("analysisQuery", "outputConfig", ))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_assets_rest_interceptors(null_interceptor): +def test_analyze_iam_policy_longrunning_rest_interceptors(null_interceptor): transport = transports.AssetServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), @@ -3925,11 +9518,12 @@ def test_list_assets_rest_interceptors(null_interceptor): client = AssetServiceClient(transport=transport) with mock.patch.object(type(client.transport._session), "request") as req, \ mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_list_assets") as post, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_list_assets") as pre: + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.AssetServiceRestInterceptor, "post_analyze_iam_policy_longrunning") as post, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "pre_analyze_iam_policy_longrunning") as pre: pre.assert_not_called() post.assert_not_called() - pb_message = asset_service.ListAssetsRequest.pb(asset_service.ListAssetsRequest()) + pb_message = asset_service.AnalyzeIamPolicyLongrunningRequest.pb(asset_service.AnalyzeIamPolicyLongrunningRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -3940,30 +9534,30 @@ def test_list_assets_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = asset_service.ListAssetsResponse.to_json(asset_service.ListAssetsResponse()) + req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) - request = asset_service.ListAssetsRequest() + request = 
asset_service.AnalyzeIamPolicyLongrunningRequest() metadata =[ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = asset_service.ListAssetsResponse() + post.return_value = operations_pb2.Operation() - client.list_assets(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.analyze_iam_policy_longrunning(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) pre.assert_called_once() post.assert_called_once() -def test_list_assets_rest_bad_request(transport: str = 'rest', request_type=asset_service.ListAssetsRequest): +def test_analyze_iam_policy_longrunning_rest_bad_request(transport: str = 'rest', request_type=asset_service.AnalyzeIamPolicyLongrunningRequest): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2'} + request_init = {'analysis_query': {'scope': 'sample1/sample2'}} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -3973,128 +9567,212 @@ def test_list_assets_rest_bad_request(transport: str = 'rest', request_type=asse response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_assets(request) + client.analyze_iam_policy_longrunning(request) -def test_list_assets_rest_flattened(): +def test_analyze_iam_policy_longrunning_rest_error(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + asset_service.AnalyzeMoveRequest, + dict, +]) +def test_analyze_move_rest(request_type): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) + # send a request that will satisfy transcoding + request_init = {'resource': 'sample1/sample2'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), 'request') as req: # Designate an appropriate value for the returned response. - return_value = asset_service.ListAssetsResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'sample1/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', + return_value = asset_service.AnalyzeMoveResponse( ) - mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = asset_service.ListAssetsResponse.pb(return_value) + pb_return_value = asset_service.AnalyzeMoveResponse.pb(return_value) json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + response = client.analyze_move(request) - client.list_assets(**mock_args) + # Establish that the response is the type that we expect. + assert isinstance(response, asset_service.AnalyzeMoveResponse) - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=*/*}/assets" % client.transport._host, args[1]) + +def test_analyze_move_rest_required_fields(request_type=asset_service.AnalyzeMoveRequest): + transport_class = transports.AssetServiceRestTransport + + request_init = {} + request_init["resource"] = "" + request_init["destination_parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + assert "destinationParent" not in jsonified_request + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_move._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "destinationParent" in jsonified_request + assert jsonified_request["destinationParent"] == request_init["destination_parent"] + + jsonified_request["resource"] = 'resource_value' + jsonified_request["destinationParent"] = 'destination_parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_move._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("destination_parent", "view", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "resource" in jsonified_request + assert jsonified_request["resource"] == 'resource_value' + assert "destinationParent" in jsonified_request + assert jsonified_request["destinationParent"] == 'destination_parent_value' + + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = asset_service.AnalyzeMoveResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
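+                # Note for the assertion further down: destination_parent is
+                # required but travels as a query parameter rather than a path
+                # field, so unlike the other required-fields tests above the
+                # expected_params list is non-empty: ("destinationParent", "")
+                # must be sent even at its default value.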
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = asset_service.AnalyzeMoveResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.analyze_move(request) + + expected_params = [ + ( + "destinationParent", + "", + ), + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_analyze_move_rest_unset_required_fields(): + transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.analyze_move._get_unset_required_fields({}) + assert set(unset_fields) == (set(("destinationParent", "view", )) & set(("resource", "destinationParent", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_analyze_move_rest_interceptors(null_interceptor): + transport = transports.AssetServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), + ) + client = AssetServiceClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "post_analyze_move") as post, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "pre_analyze_move") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = asset_service.AnalyzeMoveRequest.pb(asset_service.AnalyzeMoveRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = asset_service.AnalyzeMoveResponse.to_json(asset_service.AnalyzeMoveResponse()) + + request = asset_service.AnalyzeMoveRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = asset_service.AnalyzeMoveResponse() + + client.analyze_move(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() -def test_list_assets_rest_flattened_error(transport: str = 'rest'): +def test_analyze_move_rest_bad_request(transport: str = 'rest', request_type=asset_service.AnalyzeMoveRequest): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_assets( - asset_service.ListAssetsRequest(), - parent='parent_value', - ) + # send a request that will satisfy transcoding + request_init = {'resource': 'sample1/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.analyze_move(request) -def test_list_assets_rest_pager(transport: str = 'rest'): +def test_analyze_move_rest_error(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport='rest' ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - asset_service.ListAssetsResponse( - assets=[ - assets.Asset(), - assets.Asset(), - assets.Asset(), - ], - next_page_token='abc', - ), - asset_service.ListAssetsResponse( - assets=[], - next_page_token='def', - ), - asset_service.ListAssetsResponse( - assets=[ - assets.Asset(), - ], - next_page_token='ghi', - ), - asset_service.ListAssetsResponse( - assets=[ - assets.Asset(), - assets.Asset(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(asset_service.ListAssetsResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'parent': 'sample1/sample2'} - - pager = client.list_assets(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, assets.Asset) - for i in results) - - pages = list(client.list_assets(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - @pytest.mark.parametrize("request_type", [ - asset_service.BatchGetAssetsHistoryRequest, + asset_service.QueryAssetsRequest, dict, ]) -def test_batch_get_assets_history_rest(request_type): +def test_query_assets_rest(request_type): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -4107,24 +9785,28 @@ def test_batch_get_assets_history_rest(request_type): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), 'request') as req: # Designate an appropriate value for the returned response. - return_value = asset_service.BatchGetAssetsHistoryResponse( + return_value = asset_service.QueryAssetsResponse( + job_reference='job_reference_value', + done=True, ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = asset_service.BatchGetAssetsHistoryResponse.pb(return_value) + pb_return_value = asset_service.QueryAssetsResponse.pb(return_value) json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value - response = client.batch_get_assets_history(request) + response = client.query_assets(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, asset_service.BatchGetAssetsHistoryResponse) + assert isinstance(response, asset_service.QueryAssetsResponse) + assert response.job_reference == 'job_reference_value' + assert response.done is True -def test_batch_get_assets_history_rest_required_fields(request_type=asset_service.BatchGetAssetsHistoryRequest): +def test_query_assets_rest_required_fields(request_type=asset_service.QueryAssetsRequest): transport_class = transports.AssetServiceRestTransport request_init = {} @@ -4139,16 +9821,14 @@ def test_batch_get_assets_history_rest_required_fields(request_type=asset_servic # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).batch_get_assets_history._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).query_assets._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present jsonified_request["parent"] = 'parent_value' - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).batch_get_assets_history._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("asset_names", "content_type", "read_time_window", )) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).query_assets._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -4162,7 +9842,7 @@ def test_batch_get_assets_history_rest_required_fields(request_type=asset_servic request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = asset_service.BatchGetAssetsHistoryResponse() + return_value = asset_service.QueryAssetsResponse() # Mock the http request call within the method and fake a response. 
     with mock.patch.object(Session, 'request') as req:
         # We need to mock transcode() because providing default values
@@ -4174,21 +9854,22 @@ def test_batch_get_assets_history_rest_required_fields(request_type=asset_servic
             pb_request = request_type.pb(request)
             transcode_result = {
                 'uri': 'v1/sample_method',
-                'method': "get",
+                'method': "post",
                 'query_params': pb_request,
             }
+            transcode_result['body'] = pb_request
             transcode.return_value = transcode_result

             response_value = Response()
             response_value.status_code = 200
-            pb_return_value = asset_service.BatchGetAssetsHistoryResponse.pb(return_value)
+            pb_return_value = asset_service.QueryAssetsResponse.pb(return_value)
             json_return_value = json_format.MessageToJson(pb_return_value)

             response_value._content = json_return_value.encode('UTF-8')
             req.return_value = response_value

-            response = client.batch_get_assets_history(request)
+            response = client.query_assets(request)

             expected_params = [
             ]
@@ -4196,15 +9877,15 @@ def test_batch_get_assets_history_rest_required_fields(request_type=asset_servic
             assert expected_params == actual_params


-def test_batch_get_assets_history_rest_unset_required_fields():
+def test_query_assets_rest_unset_required_fields():
     transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials)

-    unset_fields = transport.batch_get_assets_history._get_unset_required_fields({})
-    assert set(unset_fields) == (set(("assetNames", "contentType", "readTimeWindow", )) & set(("parent", )))
+    unset_fields = transport.query_assets._get_unset_required_fields({})
+    assert set(unset_fields) == (set(()) & set(("parent", )))


 @pytest.mark.parametrize("null_interceptor", [True, False])
-def test_batch_get_assets_history_rest_interceptors(null_interceptor):
+def test_query_assets_rest_interceptors(null_interceptor):
     transport = transports.AssetServiceRestTransport(
         credentials=ga_credentials.AnonymousCredentials(),
         interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(),
@@ -4212,11 +9893,11 @@ def test_batch_get_assets_history_rest_interceptors(null_interceptor):
     client = AssetServiceClient(transport=transport)
     with mock.patch.object(type(client.transport._session), "request") as req, \
         mock.patch.object(path_template, "transcode") as transcode, \
-        mock.patch.object(transports.AssetServiceRestInterceptor, "post_batch_get_assets_history") as post, \
-        mock.patch.object(transports.AssetServiceRestInterceptor, "pre_batch_get_assets_history") as pre:
+        mock.patch.object(transports.AssetServiceRestInterceptor, "post_query_assets") as post, \
+        mock.patch.object(transports.AssetServiceRestInterceptor, "pre_query_assets") as pre:
         pre.assert_not_called()
         post.assert_not_called()
-        pb_message = asset_service.BatchGetAssetsHistoryRequest.pb(asset_service.BatchGetAssetsHistoryRequest())
+        pb_message = asset_service.QueryAssetsRequest.pb(asset_service.QueryAssetsRequest())
         transcode.return_value = {
             "method": "post",
             "uri": "my_uri",
@@ -4227,23 +9908,23 @@ def test_batch_get_assets_history_rest_interceptors(null_interceptor):
         req.return_value = Response()
         req.return_value.status_code = 200
         req.return_value.request = PreparedRequest()
-        req.return_value._content = asset_service.BatchGetAssetsHistoryResponse.to_json(asset_service.BatchGetAssetsHistoryResponse())
+        req.return_value._content = asset_service.QueryAssetsResponse.to_json(asset_service.QueryAssetsResponse())

-        request = asset_service.BatchGetAssetsHistoryRequest()
+        request = asset_service.QueryAssetsRequest()
         metadata =[
             ("key", "val"),
("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = asset_service.BatchGetAssetsHistoryResponse() + post.return_value = asset_service.QueryAssetsResponse() - client.batch_get_assets_history(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.query_assets(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) pre.assert_called_once() post.assert_called_once() -def test_batch_get_assets_history_rest_bad_request(transport: str = 'rest', request_type=asset_service.BatchGetAssetsHistoryRequest): +def test_query_assets_rest_bad_request(transport: str = 'rest', request_type=asset_service.QueryAssetsRequest): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -4260,10 +9941,10 @@ def test_batch_get_assets_history_rest_bad_request(transport: str = 'rest', requ response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.batch_get_assets_history(request) + client.query_assets(request) -def test_batch_get_assets_history_rest_error(): +def test_query_assets_rest_error(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport='rest' @@ -4271,10 +9952,10 @@ def test_batch_get_assets_history_rest_error(): @pytest.mark.parametrize("request_type", [ - asset_service.CreateFeedRequest, + asset_service.CreateSavedQueryRequest, dict, ]) -def test_create_feed_rest(request_type): +def test_create_saved_query_rest(request_type): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -4282,42 +9963,43 @@ def test_create_feed_rest(request_type): # send a request that will satisfy transcoding request_init = {'parent': 'sample1/sample2'} + request_init["saved_query"] = {'name': 'name_value', 'description': 'description_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'creator': 'creator_value', 'last_update_time': {}, 'last_updater': 'last_updater_value', 'labels': {}, 'content': {'iam_policy_analysis_query': {'scope': 'scope_value', 'resource_selector': {'full_resource_name': 'full_resource_name_value'}, 'identity_selector': {'identity': 'identity_value'}, 'access_selector': {'roles': ['roles_value1', 'roles_value2'], 'permissions': ['permissions_value1', 'permissions_value2']}, 'options': {'expand_groups': True, 'expand_roles': True, 'expand_resources': True, 'output_resource_edges': True, 'output_group_edges': True, 'analyze_service_account_impersonation': True}, 'condition_context': {'access_time': {}}}}} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), 'request') as req: # Designate an appropriate value for the returned response. 
-        return_value = asset_service.Feed(
+        return_value = asset_service.SavedQuery(
             name='name_value',
-            asset_names=['asset_names_value'],
-            asset_types=['asset_types_value'],
-            content_type=asset_service.ContentType.RESOURCE,
+            description='description_value',
+            creator='creator_value',
+            last_updater='last_updater_value',
         )

         # Wrap the value into a proper Response obj
         response_value = Response()
         response_value.status_code = 200
-        pb_return_value = asset_service.Feed.pb(return_value)
+        pb_return_value = asset_service.SavedQuery.pb(return_value)
         json_return_value = json_format.MessageToJson(pb_return_value)
         response_value._content = json_return_value.encode('UTF-8')
         req.return_value = response_value
-        response = client.create_feed(request)
+        response = client.create_saved_query(request)

     # Establish that the response is the type that we expect.
-    assert isinstance(response, asset_service.Feed)
+    assert isinstance(response, asset_service.SavedQuery)
     assert response.name == 'name_value'
-    assert response.asset_names == ['asset_names_value']
-    assert response.asset_types == ['asset_types_value']
-    assert response.content_type == asset_service.ContentType.RESOURCE
+    assert response.description == 'description_value'
+    assert response.creator == 'creator_value'
+    assert response.last_updater == 'last_updater_value'


-def test_create_feed_rest_required_fields(request_type=asset_service.CreateFeedRequest):
+def test_create_saved_query_rest_required_fields(request_type=asset_service.CreateSavedQueryRequest):
     transport_class = transports.AssetServiceRestTransport

     request_init = {}
     request_init["parent"] = ""
-    request_init["feed_id"] = ""
+    request_init["saved_query_id"] = ""
     request = request_type(**request_init)
     pb_request = request_type.pb(request)
     jsonified_request = json.loads(json_format.MessageToJson(
@@ -4327,23 +10009,28 @@ def test_create_feed_rest_required_fields(request_type=asset_service.CreateFeedR
     ))

     # verify fields with default values are dropped
+    assert "savedQueryId" not in jsonified_request

-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_feed._get_unset_required_fields(jsonified_request)
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_saved_query._get_unset_required_fields(jsonified_request)
     jsonified_request.update(unset_fields)

     # verify required fields with default values are now present
+    assert "savedQueryId" in jsonified_request
+    assert jsonified_request["savedQueryId"] == request_init["saved_query_id"]

     jsonified_request["parent"] = 'parent_value'
-    jsonified_request["feedId"] = 'feed_id_value'
+    jsonified_request["savedQueryId"] = 'saved_query_id_value'

-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_feed._get_unset_required_fields(jsonified_request)
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_saved_query._get_unset_required_fields(jsonified_request)
+    # Check that path parameters and body parameters are not mixing in.
+    assert not set(unset_fields) - set(("saved_query_id", ))
     jsonified_request.update(unset_fields)

     # verify required fields with non-default values are left alone
     assert "parent" in jsonified_request
     assert jsonified_request["parent"] == 'parent_value'
-    assert "feedId" in jsonified_request
-    assert jsonified_request["feedId"] == 'feed_id_value'
+    assert "savedQueryId" in jsonified_request
+    assert jsonified_request["savedQueryId"] == 'saved_query_id_value'

     client = AssetServiceClient(
         credentials=ga_credentials.AnonymousCredentials(),
@@ -4352,7 +10039,7 @@ def test_create_feed_rest_required_fields(request_type=asset_service.CreateFeedR
     request = request_type(**request_init)

     # Designate an appropriate value for the returned response.
-    return_value = asset_service.Feed()
+    return_value = asset_service.SavedQuery()
     # Mock the http request call within the method and fake a response.
     with mock.patch.object(Session, 'request') as req:
         # We need to mock transcode() because providing default values
@@ -4373,29 +10060,33 @@ def test_create_feed_rest_required_fields(request_type=asset_service.CreateFeedR
             response_value = Response()
             response_value.status_code = 200
-            pb_return_value = asset_service.Feed.pb(return_value)
+            pb_return_value = asset_service.SavedQuery.pb(return_value)
             json_return_value = json_format.MessageToJson(pb_return_value)
             response_value._content = json_return_value.encode('UTF-8')

             req.return_value = response_value

-            response = client.create_feed(request)
+            response = client.create_saved_query(request)

             expected_params = [
+                (
+                    "savedQueryId",
+                    "",
+                ),
             ]
             actual_params = req.call_args.kwargs['params']
             assert expected_params == actual_params


-def test_create_feed_rest_unset_required_fields():
+def test_create_saved_query_rest_unset_required_fields():
     transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials)

-    unset_fields = transport.create_feed._get_unset_required_fields({})
-    assert set(unset_fields) == (set(()) & set(("parent", "feedId", "feed", )))
+    unset_fields = transport.create_saved_query._get_unset_required_fields({})
+    assert set(unset_fields) == (set(("savedQueryId", )) & set(("parent", "savedQuery", "savedQueryId", )))


 @pytest.mark.parametrize("null_interceptor", [True, False])
-def test_create_feed_rest_interceptors(null_interceptor):
+def test_create_saved_query_rest_interceptors(null_interceptor):
     transport = transports.AssetServiceRestTransport(
         credentials=ga_credentials.AnonymousCredentials(),
         interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(),
@@ -4403,11 +10094,11 @@ def test_create_feed_rest_interceptors(null_interceptor):
     client = AssetServiceClient(transport=transport)
     with mock.patch.object(type(client.transport._session), "request") as req, \
         mock.patch.object(path_template, "transcode") as transcode, \
-        mock.patch.object(transports.AssetServiceRestInterceptor, "post_create_feed") as post, \
-        mock.patch.object(transports.AssetServiceRestInterceptor, "pre_create_feed") as pre:
+        mock.patch.object(transports.AssetServiceRestInterceptor, "post_create_saved_query") as post, \
+        mock.patch.object(transports.AssetServiceRestInterceptor, "pre_create_saved_query") as pre:
         pre.assert_not_called()
         post.assert_not_called()
-        pb_message = asset_service.CreateFeedRequest.pb(asset_service.CreateFeedRequest())
+        pb_message = asset_service.CreateSavedQueryRequest.pb(asset_service.CreateSavedQueryRequest())
         transcode.return_value = {
             "method": "post",
             "uri": "my_uri",
@@ -4418,23 +10109,23 @@ def test_create_feed_rest_interceptors(null_interceptor):
         req.return_value = Response()
         req.return_value.status_code = 200
         req.return_value.request = PreparedRequest()
-        req.return_value._content = asset_service.Feed.to_json(asset_service.Feed())
+        req.return_value._content = asset_service.SavedQuery.to_json(asset_service.SavedQuery())

-        request = asset_service.CreateFeedRequest()
+        request = asset_service.CreateSavedQueryRequest()
         metadata =[
             ("key", "val"),
             ("cephalopod", "squid"),
         ]
         pre.return_value = request, metadata
-        post.return_value = asset_service.Feed()
+        post.return_value = asset_service.SavedQuery()

-        client.create_feed(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+        client.create_saved_query(request, metadata=[("key", "val"), ("cephalopod", "squid"),])

         pre.assert_called_once()
         post.assert_called_once()


-def test_create_feed_rest_bad_request(transport: str = 'rest', request_type=asset_service.CreateFeedRequest):
+def test_create_saved_query_rest_bad_request(transport: str = 'rest', request_type=asset_service.CreateSavedQueryRequest):
     client = AssetServiceClient(
         credentials=ga_credentials.AnonymousCredentials(),
         transport=transport,
@@ -4442,6 +10133,7 @@ def test_create_feed_rest_bad_request(transport: str = 'rest', request_type=asse

     # send a request that will satisfy transcoding
     request_init = {'parent': 'sample1/sample2'}
+    request_init["saved_query"] = {'name': 'name_value', 'description': 'description_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'creator': 'creator_value', 'last_update_time': {}, 'last_updater': 'last_updater_value', 'labels': {}, 'content': {'iam_policy_analysis_query': {'scope': 'scope_value', 'resource_selector': {'full_resource_name': 'full_resource_name_value'}, 'identity_selector': {'identity': 'identity_value'}, 'access_selector': {'roles': ['roles_value1', 'roles_value2'], 'permissions': ['permissions_value1', 'permissions_value2']}, 'options': {'expand_groups': True, 'expand_roles': True, 'expand_resources': True, 'output_resource_edges': True, 'output_group_edges': True, 'analyze_service_account_impersonation': True}, 'condition_context': {'access_time': {}}}}}
     request = request_type(**request_init)

     # Mock the http request call within the method and fake a BadRequest error.
@@ -4451,10 +10143,10 @@ def test_create_feed_rest_bad_request(transport: str = 'rest', request_type=asse
         response_value.status_code = 400
         response_value.request = Request()
         req.return_value = response_value
-        client.create_feed(request)
+        client.create_saved_query(request)


-def test_create_feed_rest_flattened():
+def test_create_saved_query_rest_flattened():
     client = AssetServiceClient(
         credentials=ga_credentials.AnonymousCredentials(),
         transport="rest",
@@ -4463,7 +10155,7 @@ def test_create_feed_rest_flattened():

     # Mock the http request call within the method and fake a response.
     with mock.patch.object(type(client.transport._session), 'request') as req:
         # Designate an appropriate value for the returned response.
-        return_value = asset_service.Feed()
+        return_value = asset_service.SavedQuery()

         # get arguments that satisfy an http rule for this method
         sample_request = {'parent': 'sample1/sample2'}
@@ -4471,27 +10163,29 @@ def test_create_feed_rest_flattened():

         # get truthy value for each flattened field
         mock_args = dict(
             parent='parent_value',
+            saved_query=asset_service.SavedQuery(name='name_value'),
+            saved_query_id='saved_query_id_value',
         )
         mock_args.update(sample_request)

         # Wrap the value into a proper Response obj
         response_value = Response()
         response_value.status_code = 200
-        pb_return_value = asset_service.Feed.pb(return_value)
+        pb_return_value = asset_service.SavedQuery.pb(return_value)
         json_return_value = json_format.MessageToJson(pb_return_value)
         response_value._content = json_return_value.encode('UTF-8')
         req.return_value = response_value

-        client.create_feed(**mock_args)
+        client.create_saved_query(**mock_args)

         # Establish that the underlying call was made with the expected
         # request object values.
         assert len(req.mock_calls) == 1
         _, args, _ = req.mock_calls[0]
-        assert path_template.validate("%s/v1/{parent=*/*}/feeds" % client.transport._host, args[1])
+        assert path_template.validate("%s/v1/{parent=*/*}/savedQueries" % client.transport._host, args[1])


-def test_create_feed_rest_flattened_error(transport: str = 'rest'):
+def test_create_saved_query_rest_flattened_error(transport: str = 'rest'):
     client = AssetServiceClient(
         credentials=ga_credentials.AnonymousCredentials(),
         transport=transport,
@@ -4500,13 +10194,15 @@ def test_create_feed_rest_flattened_error(transport: str = 'rest'):

     # Attempting to call a method with both a request object and flattened
     # fields is an error.
     with pytest.raises(ValueError):
-        client.create_feed(
-            asset_service.CreateFeedRequest(),
+        client.create_saved_query(
+            asset_service.CreateSavedQueryRequest(),
             parent='parent_value',
+            saved_query=asset_service.SavedQuery(name='name_value'),
+            saved_query_id='saved_query_id_value',
         )


-def test_create_feed_rest_error():
+def test_create_saved_query_rest_error():
     client = AssetServiceClient(
         credentials=ga_credentials.AnonymousCredentials(),
         transport='rest'
@@ -4514,48 +10210,48 @@ def test_create_feed_rest_error():
     )


 @pytest.mark.parametrize("request_type", [
-    asset_service.GetFeedRequest,
+    asset_service.GetSavedQueryRequest,
     dict,
 ])
-def test_get_feed_rest(request_type):
+def test_get_saved_query_rest(request_type):
     client = AssetServiceClient(
         credentials=ga_credentials.AnonymousCredentials(),
         transport="rest",
     )

     # send a request that will satisfy transcoding
-    request_init = {'name': 'sample1/sample2/feeds/sample3'}
+    request_init = {'name': 'sample1/sample2/savedQueries/sample3'}
     request = request_type(**request_init)

     # Mock the http request call within the method and fake a response.
     with mock.patch.object(type(client.transport._session), 'request') as req:
         # Designate an appropriate value for the returned response.
-        return_value = asset_service.Feed(
+        return_value = asset_service.SavedQuery(
             name='name_value',
-            asset_names=['asset_names_value'],
-            asset_types=['asset_types_value'],
-            content_type=asset_service.ContentType.RESOURCE,
+            description='description_value',
+            creator='creator_value',
+            last_updater='last_updater_value',
         )

         # Wrap the value into a proper Response obj
         response_value = Response()
         response_value.status_code = 200
-        pb_return_value = asset_service.Feed.pb(return_value)
+        pb_return_value = asset_service.SavedQuery.pb(return_value)
         json_return_value = json_format.MessageToJson(pb_return_value)
         response_value._content = json_return_value.encode('UTF-8')
         req.return_value = response_value
-        response = client.get_feed(request)
+        response = client.get_saved_query(request)

     # Establish that the response is the type that we expect.
-    assert isinstance(response, asset_service.Feed)
+    assert isinstance(response, asset_service.SavedQuery)
     assert response.name == 'name_value'
-    assert response.asset_names == ['asset_names_value']
-    assert response.asset_types == ['asset_types_value']
-    assert response.content_type == asset_service.ContentType.RESOURCE
+    assert response.description == 'description_value'
+    assert response.creator == 'creator_value'
+    assert response.last_updater == 'last_updater_value'


-def test_get_feed_rest_required_fields(request_type=asset_service.GetFeedRequest):
+def test_get_saved_query_rest_required_fields(request_type=asset_service.GetSavedQueryRequest):
     transport_class = transports.AssetServiceRestTransport

     request_init = {}
@@ -4570,14 +10266,14 @@ def test_get_feed_rest_required_fields(request_type=asset_service.GetFeedRequest

     # verify fields with default values are dropped

-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_feed._get_unset_required_fields(jsonified_request)
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_saved_query._get_unset_required_fields(jsonified_request)
     jsonified_request.update(unset_fields)

     # verify required fields with default values are now present

     jsonified_request["name"] = 'name_value'

-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_feed._get_unset_required_fields(jsonified_request)
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_saved_query._get_unset_required_fields(jsonified_request)
     jsonified_request.update(unset_fields)

     # verify required fields with non-default values are left alone
@@ -4591,7 +10287,7 @@ def test_get_feed_rest_required_fields(request_type=asset_service.GetFeedRequest
     request = request_type(**request_init)

     # Designate an appropriate value for the returned response.
-    return_value = asset_service.Feed()
+    return_value = asset_service.SavedQuery()
     # Mock the http request call within the method and fake a response.
     with mock.patch.object(Session, 'request') as req:
         # We need to mock transcode() because providing default values
@@ -4611,13 +10307,13 @@ def test_get_feed_rest_required_fields(request_type=asset_service.GetFeedRequest
             response_value = Response()
             response_value.status_code = 200
-            pb_return_value = asset_service.Feed.pb(return_value)
+            pb_return_value = asset_service.SavedQuery.pb(return_value)
             json_return_value = json_format.MessageToJson(pb_return_value)

             response_value._content = json_return_value.encode('UTF-8')
             req.return_value = response_value

-            response = client.get_feed(request)
+            response = client.get_saved_query(request)

             expected_params = [
             ]
@@ -4625,15 +10321,15 @@ def test_get_feed_rest_required_fields(request_type=asset_service.GetFeedRequest
             assert expected_params == actual_params


-def test_get_feed_rest_unset_required_fields():
+def test_get_saved_query_rest_unset_required_fields():
     transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials)

-    unset_fields = transport.get_feed._get_unset_required_fields({})
+    unset_fields = transport.get_saved_query._get_unset_required_fields({})
     assert set(unset_fields) == (set(()) & set(("name", )))


 @pytest.mark.parametrize("null_interceptor", [True, False])
-def test_get_feed_rest_interceptors(null_interceptor):
+def test_get_saved_query_rest_interceptors(null_interceptor):
     transport = transports.AssetServiceRestTransport(
         credentials=ga_credentials.AnonymousCredentials(),
         interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(),
@@ -4641,11 +10337,11 @@ def test_get_feed_rest_interceptors(null_interceptor):
     client = AssetServiceClient(transport=transport)
     with mock.patch.object(type(client.transport._session), "request") as req, \
         mock.patch.object(path_template, "transcode") as transcode, \
-        mock.patch.object(transports.AssetServiceRestInterceptor, "post_get_feed") as post, \
-        mock.patch.object(transports.AssetServiceRestInterceptor, "pre_get_feed") as pre:
+        mock.patch.object(transports.AssetServiceRestInterceptor, "post_get_saved_query") as post, \
+        mock.patch.object(transports.AssetServiceRestInterceptor, "pre_get_saved_query") as pre:
         pre.assert_not_called()
         post.assert_not_called()
-        pb_message = asset_service.GetFeedRequest.pb(asset_service.GetFeedRequest())
+        pb_message = asset_service.GetSavedQueryRequest.pb(asset_service.GetSavedQueryRequest())
         transcode.return_value = {
             "method": "post",
             "uri": "my_uri",
@@ -4656,30 +10352,30 @@ def test_get_feed_rest_interceptors(null_interceptor):
         req.return_value = Response()
         req.return_value.status_code = 200
         req.return_value.request = PreparedRequest()
-        req.return_value._content = asset_service.Feed.to_json(asset_service.Feed())
+        req.return_value._content = asset_service.SavedQuery.to_json(asset_service.SavedQuery())

-        request = asset_service.GetFeedRequest()
+        request = asset_service.GetSavedQueryRequest()
         metadata =[
             ("key", "val"),
             ("cephalopod", "squid"),
         ]
         pre.return_value = request, metadata
-        post.return_value = asset_service.Feed()
+        post.return_value = asset_service.SavedQuery()

-        client.get_feed(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+        client.get_saved_query(request, metadata=[("key", "val"), ("cephalopod", "squid"),])

         pre.assert_called_once()
         post.assert_called_once()


-def test_get_feed_rest_bad_request(transport: str = 'rest', request_type=asset_service.GetFeedRequest):
+def test_get_saved_query_rest_bad_request(transport: str = 'rest', request_type=asset_service.GetSavedQueryRequest):
     client = AssetServiceClient(
         credentials=ga_credentials.AnonymousCredentials(),
         transport=transport,
     )

     # send a request that will satisfy transcoding
-    request_init = {'name': 'sample1/sample2/feeds/sample3'}
+    request_init = {'name': 'sample1/sample2/savedQueries/sample3'}
     request = request_type(**request_init)

     # Mock the http request call within the method and fake a BadRequest error.
@@ -4689,10 +10385,10 @@ def test_get_feed_rest_bad_request(transport: str = 'rest', request_type=asset_s
         response_value.status_code = 400
         response_value.request = Request()
         req.return_value = response_value
-        client.get_feed(request)
+        client.get_saved_query(request)


-def test_get_feed_rest_flattened():
+def test_get_saved_query_rest_flattened():
     client = AssetServiceClient(
         credentials=ga_credentials.AnonymousCredentials(),
         transport="rest",
@@ -4701,10 +10397,10 @@ def test_get_feed_rest_flattened():

     # Mock the http request call within the method and fake a response.
     with mock.patch.object(type(client.transport._session), 'request') as req:
         # Designate an appropriate value for the returned response.
-        return_value = asset_service.Feed()
+        return_value = asset_service.SavedQuery()

         # get arguments that satisfy an http rule for this method
-        sample_request = {'name': 'sample1/sample2/feeds/sample3'}
+        sample_request = {'name': 'sample1/sample2/savedQueries/sample3'}

         # get truthy value for each flattened field
         mock_args = dict(
@@ -4715,21 +10411,21 @@ def test_get_feed_rest_flattened():

         # Wrap the value into a proper Response obj
         response_value = Response()
         response_value.status_code = 200
-        pb_return_value = asset_service.Feed.pb(return_value)
+        pb_return_value = asset_service.SavedQuery.pb(return_value)
         json_return_value = json_format.MessageToJson(pb_return_value)
         response_value._content = json_return_value.encode('UTF-8')
         req.return_value = response_value

-        client.get_feed(**mock_args)
+        client.get_saved_query(**mock_args)

         # Establish that the underlying call was made with the expected
         # request object values.
         assert len(req.mock_calls) == 1
         _, args, _ = req.mock_calls[0]
-        assert path_template.validate("%s/v1/{name=*/*/feeds/*}" % client.transport._host, args[1])
+        assert path_template.validate("%s/v1/{name=*/*/savedQueries/*}" % client.transport._host, args[1])


-def test_get_feed_rest_flattened_error(transport: str = 'rest'):
+def test_get_saved_query_rest_flattened_error(transport: str = 'rest'):
     client = AssetServiceClient(
         credentials=ga_credentials.AnonymousCredentials(),
         transport=transport,
@@ -4738,13 +10434,13 @@ def test_get_feed_rest_flattened_error(transport: str = 'rest'):

     # Attempting to call a method with both a request object and flattened
     # fields is an error.
     with pytest.raises(ValueError):
-        client.get_feed(
-            asset_service.GetFeedRequest(),
+        client.get_saved_query(
+            asset_service.GetSavedQueryRequest(),
             name='name_value',
         )


-def test_get_feed_rest_error():
+def test_get_saved_query_rest_error():
     client = AssetServiceClient(
         credentials=ga_credentials.AnonymousCredentials(),
         transport='rest'
@@ -4752,10 +10448,10 @@ def test_get_feed_rest_error():
     )


 @pytest.mark.parametrize("request_type", [
-    asset_service.ListFeedsRequest,
+    asset_service.ListSavedQueriesRequest,
     dict,
 ])
-def test_list_feeds_rest(request_type):
+def test_list_saved_queries_rest(request_type):
     client = AssetServiceClient(
         credentials=ga_credentials.AnonymousCredentials(),
         transport="rest",
@@ -4768,24 +10464,26 @@ def test_list_feeds_rest(request_type):

     # Mock the http request call within the method and fake a response.
     with mock.patch.object(type(client.transport._session), 'request') as req:
         # Designate an appropriate value for the returned response.
-        return_value = asset_service.ListFeedsResponse(
+        return_value = asset_service.ListSavedQueriesResponse(
+            next_page_token='next_page_token_value',
         )

         # Wrap the value into a proper Response obj
         response_value = Response()
         response_value.status_code = 200
-        pb_return_value = asset_service.ListFeedsResponse.pb(return_value)
+        pb_return_value = asset_service.ListSavedQueriesResponse.pb(return_value)
         json_return_value = json_format.MessageToJson(pb_return_value)
         response_value._content = json_return_value.encode('UTF-8')
         req.return_value = response_value
-        response = client.list_feeds(request)
+        response = client.list_saved_queries(request)

     # Establish that the response is the type that we expect.
-    assert isinstance(response, asset_service.ListFeedsResponse)
+    assert isinstance(response, pagers.ListSavedQueriesPager)
+    assert response.next_page_token == 'next_page_token_value'


-def test_list_feeds_rest_required_fields(request_type=asset_service.ListFeedsRequest):
+def test_list_saved_queries_rest_required_fields(request_type=asset_service.ListSavedQueriesRequest):
     transport_class = transports.AssetServiceRestTransport

     request_init = {}
@@ -4800,14 +10498,16 @@ def test_list_feeds_rest_required_fields(request_type=asset_service.ListFeedsReq

     # verify fields with default values are dropped

-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_feeds._get_unset_required_fields(jsonified_request)
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_saved_queries._get_unset_required_fields(jsonified_request)
     jsonified_request.update(unset_fields)

     # verify required fields with default values are now present

     jsonified_request["parent"] = 'parent_value'

-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_feeds._get_unset_required_fields(jsonified_request)
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_saved_queries._get_unset_required_fields(jsonified_request)
+    # Check that path parameters and body parameters are not mixing in.
+    assert not set(unset_fields) - set(("filter", "page_size", "page_token", ))
     jsonified_request.update(unset_fields)

     # verify required fields with non-default values are left alone
@@ -4821,7 +10521,7 @@ def test_list_feeds_rest_required_fields(request_type=asset_service.ListFeedsReq
     request = request_type(**request_init)

     # Designate an appropriate value for the returned response.
-    return_value = asset_service.ListFeedsResponse()
+    return_value = asset_service.ListSavedQueriesResponse()
     # Mock the http request call within the method and fake a response.
     with mock.patch.object(Session, 'request') as req:
         # We need to mock transcode() because providing default values
@@ -4841,13 +10541,13 @@ def test_list_feeds_rest_required_fields(request_type=asset_service.ListFeedsReq
             response_value = Response()
             response_value.status_code = 200
-            pb_return_value = asset_service.ListFeedsResponse.pb(return_value)
+            pb_return_value = asset_service.ListSavedQueriesResponse.pb(return_value)
             json_return_value = json_format.MessageToJson(pb_return_value)

             response_value._content = json_return_value.encode('UTF-8')
             req.return_value = response_value

-            response = client.list_feeds(request)
+            response = client.list_saved_queries(request)

             expected_params = [
             ]
@@ -4855,15 +10555,15 @@ def test_list_feeds_rest_required_fields(request_type=asset_service.ListFeedsReq
             assert expected_params == actual_params


-def test_list_feeds_rest_unset_required_fields():
+def test_list_saved_queries_rest_unset_required_fields():
     transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials)

-    unset_fields = transport.list_feeds._get_unset_required_fields({})
-    assert set(unset_fields) == (set(()) & set(("parent", )))
+    unset_fields = transport.list_saved_queries._get_unset_required_fields({})
+    assert set(unset_fields) == (set(("filter", "pageSize", "pageToken", )) & set(("parent", )))


 @pytest.mark.parametrize("null_interceptor", [True, False])
-def test_list_feeds_rest_interceptors(null_interceptor):
+def test_list_saved_queries_rest_interceptors(null_interceptor):
     transport = transports.AssetServiceRestTransport(
         credentials=ga_credentials.AnonymousCredentials(),
         interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(),
@@ -4871,11 +10571,11 @@ def test_list_feeds_rest_interceptors(null_interceptor):
     client = AssetServiceClient(transport=transport)
     with mock.patch.object(type(client.transport._session), "request") as req, \
         mock.patch.object(path_template, "transcode") as transcode, \
-        mock.patch.object(transports.AssetServiceRestInterceptor, "post_list_feeds") as post, \
-        mock.patch.object(transports.AssetServiceRestInterceptor, "pre_list_feeds") as pre:
+        mock.patch.object(transports.AssetServiceRestInterceptor, "post_list_saved_queries") as post, \
+        mock.patch.object(transports.AssetServiceRestInterceptor, "pre_list_saved_queries") as pre:
         pre.assert_not_called()
         post.assert_not_called()
-        pb_message = asset_service.ListFeedsRequest.pb(asset_service.ListFeedsRequest())
+        pb_message = asset_service.ListSavedQueriesRequest.pb(asset_service.ListSavedQueriesRequest())
         transcode.return_value = {
             "method": "post",
             "uri": "my_uri",
@@ -4886,23 +10586,23 @@ def test_list_feeds_rest_interceptors(null_interceptor):
         req.return_value = Response()
         req.return_value.status_code = 200
         req.return_value.request = PreparedRequest()
-        req.return_value._content = asset_service.ListFeedsResponse.to_json(asset_service.ListFeedsResponse())
+        req.return_value._content = asset_service.ListSavedQueriesResponse.to_json(asset_service.ListSavedQueriesResponse())

-        request = asset_service.ListFeedsRequest()
+        request = asset_service.ListSavedQueriesRequest()
         metadata =[
             ("key", "val"),
             ("cephalopod", "squid"),
         ]
         pre.return_value = request, metadata
-        post.return_value = asset_service.ListFeedsResponse()
+        post.return_value = asset_service.ListSavedQueriesResponse()

-        client.list_feeds(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+        client.list_saved_queries(request, metadata=[("key", "val"), ("cephalopod", "squid"),])

         pre.assert_called_once()
         post.assert_called_once()


-def test_list_feeds_rest_bad_request(transport: str = 'rest', request_type=asset_service.ListFeedsRequest):
+def test_list_saved_queries_rest_bad_request(transport: str = 'rest', request_type=asset_service.ListSavedQueriesRequest):
     client = AssetServiceClient(
         credentials=ga_credentials.AnonymousCredentials(),
         transport=transport,
@@ -4919,10 +10619,10 @@ def test_list_feeds_rest_bad_request(transport: str = 'rest', request_type=asset
         response_value.status_code = 400
         response_value.request = Request()
         req.return_value = response_value
-        client.list_feeds(request)
+        client.list_saved_queries(request)


-def test_list_feeds_rest_flattened():
+def test_list_saved_queries_rest_flattened():
     client = AssetServiceClient(
         credentials=ga_credentials.AnonymousCredentials(),
         transport="rest",
@@ -4931,7 +10631,7 @@ def test_list_feeds_rest_flattened():

     # Mock the http request call within the method and fake a response.
     with mock.patch.object(type(client.transport._session), 'request') as req:
         # Designate an appropriate value for the returned response.
-        return_value = asset_service.ListFeedsResponse()
+        return_value = asset_service.ListSavedQueriesResponse()

         # get arguments that satisfy an http rule for this method
         sample_request = {'parent': 'sample1/sample2'}
@@ -4945,85 +10645,141 @@ def test_list_feeds_rest_flattened():

         # Wrap the value into a proper Response obj
         response_value = Response()
         response_value.status_code = 200
-        pb_return_value = asset_service.ListFeedsResponse.pb(return_value)
+        pb_return_value = asset_service.ListSavedQueriesResponse.pb(return_value)
         json_return_value = json_format.MessageToJson(pb_return_value)
         response_value._content = json_return_value.encode('UTF-8')
         req.return_value = response_value

-        client.list_feeds(**mock_args)
+        client.list_saved_queries(**mock_args)

         # Establish that the underlying call was made with the expected
         # request object values.
         assert len(req.mock_calls) == 1
         _, args, _ = req.mock_calls[0]
-        assert path_template.validate("%s/v1/{parent=*/*}/feeds" % client.transport._host, args[1])
+        assert path_template.validate("%s/v1/{parent=*/*}/savedQueries" % client.transport._host, args[1])
+
+
+def test_list_saved_queries_rest_flattened_error(transport: str = 'rest'):
+    client = AssetServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_saved_queries(
+            asset_service.ListSavedQueriesRequest(),
+            parent='parent_value',
+        )
+
+
+def test_list_saved_queries_rest_pager(transport: str = 'rest'):
+    client = AssetServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, 'request') as req:
+        # TODO(kbandes): remove this mock unless there's a good reason for it.
+        #with mock.patch.object(path_template, 'transcode') as transcode:
+        # Set the response as a series of pages
+        response = (
+            asset_service.ListSavedQueriesResponse(
+                saved_queries=[
+                    asset_service.SavedQuery(),
+                    asset_service.SavedQuery(),
+                    asset_service.SavedQuery(),
+                ],
+                next_page_token='abc',
+            ),
+            asset_service.ListSavedQueriesResponse(
+                saved_queries=[],
+                next_page_token='def',
+            ),
+            asset_service.ListSavedQueriesResponse(
+                saved_queries=[
+                    asset_service.SavedQuery(),
+                ],
+                next_page_token='ghi',
+            ),
+            asset_service.ListSavedQueriesResponse(
+                saved_queries=[
+                    asset_service.SavedQuery(),
+                    asset_service.SavedQuery(),
+                ],
+            ),
+        )
+        # Two responses for two calls
+        response = response + response
+        # Wrap the values into proper Response objs
+        response = tuple(asset_service.ListSavedQueriesResponse.to_json(x) for x in response)
+        return_values = tuple(Response() for i in response)
+        for return_val, response_val in zip(return_values, response):
+            return_val._content = response_val.encode('UTF-8')
+            return_val.status_code = 200
+        req.side_effect = return_values

-def test_list_feeds_rest_flattened_error(transport: str = 'rest'):
-    client = AssetServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
+        sample_request = {'parent': 'sample1/sample2'}

-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.list_feeds(
-            asset_service.ListFeedsRequest(),
-            parent='parent_value',
-        )
+        pager = client.list_saved_queries(request=sample_request)

+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, asset_service.SavedQuery)
+                for i in results)

-def test_list_feeds_rest_error():
-    client = AssetServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='rest'
-    )
+        pages = list(client.list_saved_queries(request=sample_request).pages)
+        for page_, token in zip(pages, ['abc','def','ghi', '']):
+            assert page_.raw_page.next_page_token == token


 @pytest.mark.parametrize("request_type", [
-    asset_service.UpdateFeedRequest,
+    asset_service.UpdateSavedQueryRequest,
     dict,
 ])
-def test_update_feed_rest(request_type):
+def test_update_saved_query_rest(request_type):
     client = AssetServiceClient(
         credentials=ga_credentials.AnonymousCredentials(),
         transport="rest",
     )

     # send a request that will satisfy transcoding
-    request_init = {'feed': {'name': 'sample1/sample2/feeds/sample3'}}
+    request_init = {'saved_query': {'name': 'sample1/sample2/savedQueries/sample3'}}
+    request_init["saved_query"] = {'name': 'sample1/sample2/savedQueries/sample3', 'description': 'description_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'creator': 'creator_value', 'last_update_time': {}, 'last_updater': 'last_updater_value', 'labels': {}, 'content': {'iam_policy_analysis_query': {'scope': 'scope_value', 'resource_selector': {'full_resource_name': 'full_resource_name_value'}, 'identity_selector': {'identity': 'identity_value'}, 'access_selector': {'roles': ['roles_value1', 'roles_value2'], 'permissions': ['permissions_value1', 'permissions_value2']}, 'options': {'expand_groups': True, 'expand_roles': True, 'expand_resources': True, 'output_resource_edges': True, 'output_group_edges': True, 'analyze_service_account_impersonation': True}, 'condition_context': {'access_time': {}}}}}
     request = request_type(**request_init)

     # Mock the http request call within the method and fake a response.
     with mock.patch.object(type(client.transport._session), 'request') as req:
         # Designate an appropriate value for the returned response.
-        return_value = asset_service.Feed(
+        return_value = asset_service.SavedQuery(
             name='name_value',
-            asset_names=['asset_names_value'],
-            asset_types=['asset_types_value'],
-            content_type=asset_service.ContentType.RESOURCE,
+            description='description_value',
+            creator='creator_value',
+            last_updater='last_updater_value',
         )

         # Wrap the value into a proper Response obj
         response_value = Response()
         response_value.status_code = 200
-        pb_return_value = asset_service.Feed.pb(return_value)
+        pb_return_value = asset_service.SavedQuery.pb(return_value)
         json_return_value = json_format.MessageToJson(pb_return_value)
         response_value._content = json_return_value.encode('UTF-8')
         req.return_value = response_value
-        response = client.update_feed(request)
+        response = client.update_saved_query(request)

     # Establish that the response is the type that we expect.
-    assert isinstance(response, asset_service.Feed)
+    assert isinstance(response, asset_service.SavedQuery)
     assert response.name == 'name_value'
-    assert response.asset_names == ['asset_names_value']
-    assert response.asset_types == ['asset_types_value']
-    assert response.content_type == asset_service.ContentType.RESOURCE
+    assert response.description == 'description_value'
+    assert response.creator == 'creator_value'
+    assert response.last_updater == 'last_updater_value'


-def test_update_feed_rest_required_fields(request_type=asset_service.UpdateFeedRequest):
+def test_update_saved_query_rest_required_fields(request_type=asset_service.UpdateSavedQueryRequest):
     transport_class = transports.AssetServiceRestTransport

     request_init = {}
@@ -5037,12 +10793,14 @@ def test_update_feed_rest_required_fields(request_type=asset_service.UpdateFeedR

     # verify fields with default values are dropped

-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_feed._get_unset_required_fields(jsonified_request)
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_saved_query._get_unset_required_fields(jsonified_request)
     jsonified_request.update(unset_fields)

     # verify required fields with default values are now present

-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_feed._get_unset_required_fields(jsonified_request)
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_saved_query._get_unset_required_fields(jsonified_request)
+    # Check that path parameters and body parameters are not mixing in.
+    assert not set(unset_fields) - set(("update_mask", ))
     jsonified_request.update(unset_fields)

     # verify required fields with non-default values are left alone
@@ -5054,7 +10812,7 @@ def test_update_feed_rest_required_fields(request_type=asset_service.UpdateFeedR
     request = request_type(**request_init)

     # Designate an appropriate value for the returned response.
-    return_value = asset_service.Feed()
+    return_value = asset_service.SavedQuery()
     # Mock the http request call within the method and fake a response.
     with mock.patch.object(Session, 'request') as req:
         # We need to mock transcode() because providing default values
@@ -5075,13 +10833,13 @@ def test_update_feed_rest_required_fields(request_type=asset_service.UpdateFeedR
             response_value = Response()
             response_value.status_code = 200
-            pb_return_value = asset_service.Feed.pb(return_value)
+            pb_return_value = asset_service.SavedQuery.pb(return_value)
             json_return_value = json_format.MessageToJson(pb_return_value)

             response_value._content = json_return_value.encode('UTF-8')
             req.return_value = response_value

-            response = client.update_feed(request)
+            response = client.update_saved_query(request)

             expected_params = [
             ]
@@ -5089,15 +10847,15 @@ def test_update_feed_rest_required_fields(request_type=asset_service.UpdateFeedR
             assert expected_params == actual_params


-def test_update_feed_rest_unset_required_fields():
+def test_update_saved_query_rest_unset_required_fields():
     transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials)

-    unset_fields = transport.update_feed._get_unset_required_fields({})
-    assert set(unset_fields) == (set(()) & set(("feed", "updateMask", )))
+    unset_fields = transport.update_saved_query._get_unset_required_fields({})
+    assert set(unset_fields) == (set(("updateMask", )) & set(("savedQuery", "updateMask", )))


 @pytest.mark.parametrize("null_interceptor", [True, False])
-def test_update_feed_rest_interceptors(null_interceptor):
+def test_update_saved_query_rest_interceptors(null_interceptor):
     transport = transports.AssetServiceRestTransport(
         credentials=ga_credentials.AnonymousCredentials(),
         interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(),
@@ -5105,11 +10863,11 @@ def test_update_feed_rest_interceptors(null_interceptor):
     client = AssetServiceClient(transport=transport)
     with mock.patch.object(type(client.transport._session), "request") as req, \
         mock.patch.object(path_template, "transcode") as transcode, \
-        mock.patch.object(transports.AssetServiceRestInterceptor, "post_update_feed") as post, \
-        mock.patch.object(transports.AssetServiceRestInterceptor, "pre_update_feed") as pre:
+        mock.patch.object(transports.AssetServiceRestInterceptor, "post_update_saved_query") as post, \
+        mock.patch.object(transports.AssetServiceRestInterceptor, "pre_update_saved_query") as pre:
         pre.assert_not_called()
         post.assert_not_called()
-        pb_message = asset_service.UpdateFeedRequest.pb(asset_service.UpdateFeedRequest())
+        pb_message = asset_service.UpdateSavedQueryRequest.pb(asset_service.UpdateSavedQueryRequest())
         transcode.return_value = {
             "method": "post",
             "uri": "my_uri",
@@ -5120,30 +10878,31 @@ def test_update_feed_rest_interceptors(null_interceptor):
         req.return_value = Response()
         req.return_value.status_code = 200
         req.return_value.request = PreparedRequest()
-        req.return_value._content = asset_service.Feed.to_json(asset_service.Feed())
+        req.return_value._content = asset_service.SavedQuery.to_json(asset_service.SavedQuery())

-        request = asset_service.UpdateFeedRequest()
+        request = asset_service.UpdateSavedQueryRequest()
         metadata =[
             ("key", "val"),
             ("cephalopod", "squid"),
         ]
         pre.return_value = request, metadata
-        post.return_value = asset_service.Feed()
+        post.return_value = asset_service.SavedQuery()

-        client.update_feed(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+        client.update_saved_query(request, metadata=[("key", "val"), ("cephalopod", "squid"),])

         pre.assert_called_once()
         post.assert_called_once()


-def test_update_feed_rest_bad_request(transport: str = 'rest', request_type=asset_service.UpdateFeedRequest):
+def test_update_saved_query_rest_bad_request(transport: str = 'rest', request_type=asset_service.UpdateSavedQueryRequest):
     client = AssetServiceClient(
         credentials=ga_credentials.AnonymousCredentials(),
         transport=transport,
     )

     # send a request that will satisfy transcoding
-    request_init = {'feed': {'name': 'sample1/sample2/feeds/sample3'}}
+    request_init = {'saved_query': {'name': 'sample1/sample2/savedQueries/sample3'}}
+    request_init["saved_query"] = {'name': 'sample1/sample2/savedQueries/sample3', 'description': 'description_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'creator': 'creator_value', 'last_update_time': {}, 'last_updater': 'last_updater_value', 'labels': {}, 'content': {'iam_policy_analysis_query': {'scope': 'scope_value', 'resource_selector': {'full_resource_name': 'full_resource_name_value'}, 'identity_selector': {'identity': 'identity_value'}, 'access_selector': {'roles': ['roles_value1', 'roles_value2'], 'permissions': ['permissions_value1', 'permissions_value2']}, 'options': {'expand_groups': True, 'expand_roles': True, 'expand_resources': True, 'output_resource_edges': True, 'output_group_edges': True, 'analyze_service_account_impersonation': True}, 'condition_context': {'access_time': {}}}}}
     request = request_type(**request_init)

     # Mock the http request call within the method and fake a BadRequest error.
@@ -5153,10 +10912,10 @@ def test_update_feed_rest_bad_request(transport: str = 'rest', request_type=asse
         response_value.status_code = 400
         response_value.request = Request()
         req.return_value = response_value
-        client.update_feed(request)
+        client.update_saved_query(request)


-def test_update_feed_rest_flattened():
+def test_update_saved_query_rest_flattened():
     client = AssetServiceClient(
         credentials=ga_credentials.AnonymousCredentials(),
         transport="rest",
@@ -5165,35 +10924,36 @@ def test_update_feed_rest_flattened():

     # Mock the http request call within the method and fake a response.
     with mock.patch.object(type(client.transport._session), 'request') as req:
         # Designate an appropriate value for the returned response.
-        return_value = asset_service.Feed()
+        return_value = asset_service.SavedQuery()

         # get arguments that satisfy an http rule for this method
-        sample_request = {'feed': {'name': 'sample1/sample2/feeds/sample3'}}
+        sample_request = {'saved_query': {'name': 'sample1/sample2/savedQueries/sample3'}}

         # get truthy value for each flattened field
         mock_args = dict(
-            feed=asset_service.Feed(name='name_value'),
+            saved_query=asset_service.SavedQuery(name='name_value'),
+            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
         )
         mock_args.update(sample_request)

         # Wrap the value into a proper Response obj
         response_value = Response()
         response_value.status_code = 200
-        pb_return_value = asset_service.Feed.pb(return_value)
+        pb_return_value = asset_service.SavedQuery.pb(return_value)
         json_return_value = json_format.MessageToJson(pb_return_value)
         response_value._content = json_return_value.encode('UTF-8')
         req.return_value = response_value

-        client.update_feed(**mock_args)
+        client.update_saved_query(**mock_args)

         # Establish that the underlying call was made with the expected
         # request object values.
         assert len(req.mock_calls) == 1
         _, args, _ = req.mock_calls[0]
-        assert path_template.validate("%s/v1/{feed.name=*/*/feeds/*}" % client.transport._host, args[1])
+        assert path_template.validate("%s/v1/{saved_query.name=*/*/savedQueries/*}" % client.transport._host, args[1])


-def test_update_feed_rest_flattened_error(transport: str = 'rest'):
+def test_update_saved_query_rest_flattened_error(transport: str = 'rest'):
     client = AssetServiceClient(
         credentials=ga_credentials.AnonymousCredentials(),
         transport=transport,
@@ -5202,13 +10962,14 @@ def test_update_feed_rest_flattened_error(transport: str = 'rest'):

     # Attempting to call a method with both a request object and flattened
     # fields is an error.
     with pytest.raises(ValueError):
-        client.update_feed(
-            asset_service.UpdateFeedRequest(),
-            feed=asset_service.Feed(name='name_value'),
+        client.update_saved_query(
+            asset_service.UpdateSavedQueryRequest(),
+            saved_query=asset_service.SavedQuery(name='name_value'),
+            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
         )


-def test_update_feed_rest_error():
+def test_update_saved_query_rest_error():
     client = AssetServiceClient(
         credentials=ga_credentials.AnonymousCredentials(),
         transport='rest'
@@ -5216,17 +10977,17 @@ def test_update_feed_rest_error():
     )


 @pytest.mark.parametrize("request_type", [
-    asset_service.DeleteFeedRequest,
+    asset_service.DeleteSavedQueryRequest,
     dict,
 ])
-def test_delete_feed_rest(request_type):
+def test_delete_saved_query_rest(request_type):
     client = AssetServiceClient(
         credentials=ga_credentials.AnonymousCredentials(),
         transport="rest",
     )

     # send a request that will satisfy transcoding
-    request_init = {'name': 'sample1/sample2/feeds/sample3'}
+    request_init = {'name': 'sample1/sample2/savedQueries/sample3'}
     request = request_type(**request_init)

     # Mock the http request call within the method and fake a response.
@@ -5241,13 +11002,13 @@ def test_delete_feed_rest(request_type):
         response_value._content = json_return_value.encode('UTF-8')
         req.return_value = response_value
-        response = client.delete_feed(request)
+        response = client.delete_saved_query(request)

     # Establish that the response is the type that we expect.
     assert response is None


-def test_delete_feed_rest_required_fields(request_type=asset_service.DeleteFeedRequest):
+def test_delete_saved_query_rest_required_fields(request_type=asset_service.DeleteSavedQueryRequest):
     transport_class = transports.AssetServiceRestTransport

     request_init = {}
@@ -5262,14 +11023,14 @@ def test_delete_feed_rest_required_fields(request_type=asset_service.DeleteFeedR

     # verify fields with default values are dropped

-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_feed._get_unset_required_fields(jsonified_request)
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_saved_query._get_unset_required_fields(jsonified_request)
     jsonified_request.update(unset_fields)

     # verify required fields with default values are now present

     jsonified_request["name"] = 'name_value'

-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_feed._get_unset_required_fields(jsonified_request)
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_saved_query._get_unset_required_fields(jsonified_request)
     jsonified_request.update(unset_fields)

     # verify required fields with non-default values are left alone
@@ -5307,7 +11068,7 @@ def test_delete_feed_rest_required_fields(request_type=asset_service.DeleteFeedR
             response_value._content = json_return_value.encode('UTF-8')
             req.return_value = response_value
-            response = client.delete_feed(request)
+            response = client.delete_saved_query(request)

             expected_params = [
             ]
@@ -5315,15 +11076,15 @@ def test_delete_feed_rest_required_fields(request_type=asset_service.DeleteFeedR
             assert expected_params == actual_params


-def test_delete_feed_rest_unset_required_fields():
+def test_delete_saved_query_rest_unset_required_fields():
     transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials)

-    unset_fields = transport.delete_feed._get_unset_required_fields({})
+    unset_fields = transport.delete_saved_query._get_unset_required_fields({})
     assert set(unset_fields) == (set(()) & set(("name", )))


 @pytest.mark.parametrize("null_interceptor", [True, False])
-def test_delete_feed_rest_interceptors(null_interceptor):
+def test_delete_saved_query_rest_interceptors(null_interceptor):
     transport = transports.AssetServiceRestTransport(
         credentials=ga_credentials.AnonymousCredentials(),
         interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(),
@@ -5331,9 +11092,9 @@ def test_delete_feed_rest_interceptors(null_interceptor):
     client = AssetServiceClient(transport=transport)
     with mock.patch.object(type(client.transport._session), "request") as req, \
         mock.patch.object(path_template, "transcode") as transcode, \
-        mock.patch.object(transports.AssetServiceRestInterceptor, "pre_delete_feed") as pre:
+        mock.patch.object(transports.AssetServiceRestInterceptor, "pre_delete_saved_query") as pre:
         pre.assert_not_called()
-        pb_message = asset_service.DeleteFeedRequest.pb(asset_service.DeleteFeedRequest())
+        pb_message = asset_service.DeleteSavedQueryRequest.pb(asset_service.DeleteSavedQueryRequest())
         transcode.return_value = {
             "method": "post",
             "uri": "my_uri",
@@ -5345,26 +11106,26 @@ def test_delete_feed_rest_interceptors(null_interceptor):
         req.return_value.status_code = 200
         req.return_value.request = PreparedRequest()

-        request = asset_service.DeleteFeedRequest()
+        request = asset_service.DeleteSavedQueryRequest()
         metadata =[
             ("key", "val"),
             ("cephalopod", "squid"),
         ]
pre.return_value = request, metadata - client.delete_feed(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.delete_saved_query(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) pre.assert_called_once() -def test_delete_feed_rest_bad_request(transport: str = 'rest', request_type=asset_service.DeleteFeedRequest): +def test_delete_saved_query_rest_bad_request(transport: str = 'rest', request_type=asset_service.DeleteSavedQueryRequest): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # send a request that will satisfy transcoding - request_init = {'name': 'sample1/sample2/feeds/sample3'} + request_init = {'name': 'sample1/sample2/savedQueries/sample3'} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -5374,10 +11135,10 @@ def test_delete_feed_rest_bad_request(transport: str = 'rest', request_type=asse response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.delete_feed(request) + client.delete_saved_query(request) -def test_delete_feed_rest_flattened(): +def test_delete_saved_query_rest_flattened(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -5389,7 +11150,7 @@ def test_delete_feed_rest_flattened(): return_value = None # get arguments that satisfy an http rule for this method - sample_request = {'name': 'sample1/sample2/feeds/sample3'} + sample_request = {'name': 'sample1/sample2/savedQueries/sample3'} # get truthy value for each flattened field mock_args = dict( @@ -5404,16 +11165,16 @@ def test_delete_feed_rest_flattened(): response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value - client.delete_feed(**mock_args) + client.delete_saved_query(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=*/*/feeds/*}" % client.transport._host, args[1]) + assert path_template.validate("%s/v1/{name=*/*/savedQueries/*}" % client.transport._host, args[1]) -def test_delete_feed_rest_flattened_error(transport: str = 'rest'): +def test_delete_saved_query_rest_flattened_error(transport: str = 'rest'): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5422,13 +11183,13 @@ def test_delete_feed_rest_flattened_error(transport: str = 'rest'): # Attempting to call a method with both a request object and flattened # fields is an error. 
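# [Illustrative aside] The ValueError asserted below enforces a uniform GAPIC
# rule: pass either a request object or flattened keyword fields, never both.
# Hedged sketch (hypothetical name; kept commented to avoid a live call):
#
#     req = asset_service.DeleteSavedQueryRequest(name="projects/p/savedQueries/q")
#     client.delete_saved_query(request=req)    # fine
#     client.delete_saved_query(name=req.name)  # fine
#     client.delete_saved_query(req, name=req.name)  # raises ValueError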
with pytest.raises(ValueError): - client.delete_feed( - asset_service.DeleteFeedRequest(), + client.delete_saved_query( + asset_service.DeleteSavedQueryRequest(), name='name_value', ) -def test_delete_feed_rest_error(): +def test_delete_saved_query_rest_error(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport='rest' @@ -5436,10 +11197,10 @@ def test_delete_feed_rest_error(): @pytest.mark.parametrize("request_type", [ - asset_service.SearchAllResourcesRequest, + asset_service.BatchGetEffectiveIamPoliciesRequest, dict, ]) -def test_search_all_resources_rest(request_type): +def test_batch_get_effective_iam_policies_rest(request_type): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -5452,30 +11213,29 @@ def test_search_all_resources_rest(request_type): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), 'request') as req: # Designate an appropriate value for the returned response. - return_value = asset_service.SearchAllResourcesResponse( - next_page_token='next_page_token_value', + return_value = asset_service.BatchGetEffectiveIamPoliciesResponse( ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = asset_service.SearchAllResourcesResponse.pb(return_value) + pb_return_value = asset_service.BatchGetEffectiveIamPoliciesResponse.pb(return_value) json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value - response = client.search_all_resources(request) + response = client.batch_get_effective_iam_policies(request) # Establish that the response is the type that we expect. 
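# [Illustrative aside] Unlike the paginated methods around it,
# BatchGetEffectiveIamPolicies is unary: the client returns the response
# message itself, as the rewritten assertion below checks. A hedged usage
# sketch with hypothetical scope and asset names:
example_response = client.batch_get_effective_iam_policies(
    asset_service.BatchGetEffectiveIamPoliciesRequest(
        scope="projects/my-project",
        names=["//cloudresourcemanager.googleapis.com/projects/my-project"],
    )
)
for example_result in example_response.policy_results:
    print(example_result.full_resource_name)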
- assert isinstance(response, pagers.SearchAllResourcesPager) - assert response.next_page_token == 'next_page_token_value' + assert isinstance(response, asset_service.BatchGetEffectiveIamPoliciesResponse) -def test_search_all_resources_rest_required_fields(request_type=asset_service.SearchAllResourcesRequest): +def test_batch_get_effective_iam_policies_rest_required_fields(request_type=asset_service.BatchGetEffectiveIamPoliciesRequest): transport_class = transports.AssetServiceRestTransport request_init = {} request_init["scope"] = "" + request_init["names"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads(json_format.MessageToJson( @@ -5485,22 +11245,28 @@ def test_search_all_resources_rest_required_fields(request_type=asset_service.Se )) # verify fields with default values are dropped + assert "names" not in jsonified_request - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).search_all_resources._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).batch_get_effective_iam_policies._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + assert "names" in jsonified_request + assert jsonified_request["names"] == request_init["names"] jsonified_request["scope"] = 'scope_value' + jsonified_request["names"] = 'names_value' - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).search_all_resources._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).batch_get_effective_iam_policies._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("asset_types", "order_by", "page_size", "page_token", "query", )) + assert not set(unset_fields) - set(("names", )) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "scope" in jsonified_request assert jsonified_request["scope"] == 'scope_value' + assert "names" in jsonified_request + assert jsonified_request["names"] == 'names_value' client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5509,7 +11275,7 @@ def test_search_all_resources_rest_required_fields(request_type=asset_service.Se request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = asset_service.SearchAllResourcesResponse() + return_value = asset_service.BatchGetEffectiveIamPoliciesResponse() # Mock the http request call within the method and fake a response. 
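# [Illustrative aside] "names" is a required field carried in the query string,
# while "scope" is bound into the URL path -- which is why the rewritten
# expected_params check further down pins [("names", "")] even though the field
# is empty. Hedged sketch of the distinction:
example_path_params = {"scope"}            # bound via /v1/{scope=*/*}/...
example_required_query_params = {"names"}  # serialized even when empty
assert example_path_params.isdisjoint(example_required_query_params)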
with mock.patch.object(Session, 'request') as req: # We need to mock transcode() because providing default values @@ -5529,29 +11295,33 @@ def test_search_all_resources_rest_required_fields(request_type=asset_service.Se response_value = Response() response_value.status_code = 200 - pb_return_value = asset_service.SearchAllResourcesResponse.pb(return_value) + pb_return_value = asset_service.BatchGetEffectiveIamPoliciesResponse.pb(return_value) json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value - response = client.search_all_resources(request) + response = client.batch_get_effective_iam_policies(request) expected_params = [ + ( + "names", + "", + ), ] actual_params = req.call_args.kwargs['params'] assert expected_params == actual_params -def test_search_all_resources_rest_unset_required_fields(): +def test_batch_get_effective_iam_policies_rest_unset_required_fields(): transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - unset_fields = transport.search_all_resources._get_unset_required_fields({}) - assert set(unset_fields) == (set(("assetTypes", "orderBy", "pageSize", "pageToken", "query", )) & set(("scope", ))) + unset_fields = transport.batch_get_effective_iam_policies._get_unset_required_fields({}) + assert set(unset_fields) == (set(("names", )) & set(("scope", "names", ))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_search_all_resources_rest_interceptors(null_interceptor): +def test_batch_get_effective_iam_policies_rest_interceptors(null_interceptor): transport = transports.AssetServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), @@ -5559,11 +11329,11 @@ def test_search_all_resources_rest_interceptors(null_interceptor): client = AssetServiceClient(transport=transport) with mock.patch.object(type(client.transport._session), "request") as req, \ mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_search_all_resources") as post, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_search_all_resources") as pre: + mock.patch.object(transports.AssetServiceRestInterceptor, "post_batch_get_effective_iam_policies") as post, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "pre_batch_get_effective_iam_policies") as pre: pre.assert_not_called() post.assert_not_called() - pb_message = asset_service.SearchAllResourcesRequest.pb(asset_service.SearchAllResourcesRequest()) + pb_message = asset_service.BatchGetEffectiveIamPoliciesRequest.pb(asset_service.BatchGetEffectiveIamPoliciesRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -5574,23 +11344,23 @@ def test_search_all_resources_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = asset_service.SearchAllResourcesResponse.to_json(asset_service.SearchAllResourcesResponse()) + req.return_value._content = asset_service.BatchGetEffectiveIamPoliciesResponse.to_json(asset_service.BatchGetEffectiveIamPoliciesResponse()) - request = asset_service.SearchAllResourcesRequest() + request = asset_service.BatchGetEffectiveIamPoliciesRequest() metadata =[ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata
- post.return_value = asset_service.SearchAllResourcesResponse() + post.return_value = asset_service.BatchGetEffectiveIamPoliciesResponse() - client.search_all_resources(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.batch_get_effective_iam_policies(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) pre.assert_called_once() post.assert_called_once() -def test_search_all_resources_rest_bad_request(transport: str = 'rest', request_type=asset_service.SearchAllResourcesRequest): +def test_batch_get_effective_iam_policies_rest_bad_request(transport: str = 'rest', request_type=asset_service.BatchGetEffectiveIamPoliciesRequest): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5607,132 +11377,21 @@ def test_search_all_resources_rest_bad_request(transport: str = 'rest', request_ response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.search_all_resources(request) - - -def test_search_all_resources_rest_flattened(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = asset_service.SearchAllResourcesResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'scope': 'sample1/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - scope='scope_value', - query='query_value', - asset_types=['asset_types_value'], - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = asset_service.SearchAllResourcesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.search_all_resources(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{scope=*/*}:searchAllResources" % client.transport._host, args[1]) - - -def test_search_all_resources_rest_flattened_error(transport: str = 'rest'): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.search_all_resources( - asset_service.SearchAllResourcesRequest(), - scope='scope_value', - query='query_value', - asset_types=['asset_types_value'], - ) + client.batch_get_effective_iam_policies(request) -def test_search_all_resources_rest_pager(transport: str = 'rest'): +def test_batch_get_effective_iam_policies_rest_error(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport='rest' ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it.
- #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - asset_service.SearchAllResourcesResponse( - results=[ - assets.ResourceSearchResult(), - assets.ResourceSearchResult(), - assets.ResourceSearchResult(), - ], - next_page_token='abc', - ), - asset_service.SearchAllResourcesResponse( - results=[], - next_page_token='def', - ), - asset_service.SearchAllResourcesResponse( - results=[ - assets.ResourceSearchResult(), - ], - next_page_token='ghi', - ), - asset_service.SearchAllResourcesResponse( - results=[ - assets.ResourceSearchResult(), - assets.ResourceSearchResult(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(asset_service.SearchAllResourcesResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'scope': 'sample1/sample2'} - - pager = client.search_all_resources(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, assets.ResourceSearchResult) - for i in results) - - pages = list(client.search_all_resources(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - @pytest.mark.parametrize("request_type", [ - asset_service.SearchAllIamPoliciesRequest, + asset_service.AnalyzeOrgPoliciesRequest, dict, ]) -def test_search_all_iam_policies_rest(request_type): +def test_analyze_org_policies_rest(request_type): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -5745,30 +11404,31 @@ def test_search_all_iam_policies_rest(request_type): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), 'request') as req: # Designate an appropriate value for the returned response. - return_value = asset_service.SearchAllIamPoliciesResponse( + return_value = asset_service.AnalyzeOrgPoliciesResponse( next_page_token='next_page_token_value', ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = asset_service.SearchAllIamPoliciesResponse.pb(return_value) + pb_return_value = asset_service.AnalyzeOrgPoliciesResponse.pb(return_value) json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value - response = client.search_all_iam_policies(request) + response = client.analyze_org_policies(request) # Establish that the response is the type that we expect. 
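# [Illustrative aside] AnalyzeOrgPolicies is paginated, so the surfaced method
# wraps responses in a pager, as asserted below. Hedged usage sketch with a
# hypothetical scope and constraint:
example_pager = client.analyze_org_policies(
    scope="organizations/123456789",
    constraint="constraints/compute.requireOsLogin",
)
for example_result in example_pager:  # fetches subsequent pages on demand
    print(example_result.consolidated_policy)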
- assert isinstance(response, pagers.SearchAllIamPoliciesPager) + assert isinstance(response, pagers.AnalyzeOrgPoliciesPager) assert response.next_page_token == 'next_page_token_value' -def test_search_all_iam_policies_rest_required_fields(request_type=asset_service.SearchAllIamPoliciesRequest): +def test_analyze_org_policies_rest_required_fields(request_type=asset_service.AnalyzeOrgPoliciesRequest): transport_class = transports.AssetServiceRestTransport request_init = {} request_init["scope"] = "" + request_init["constraint"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads(json_format.MessageToJson( @@ -5778,22 +11438,28 @@ def test_search_all_iam_policies_rest_required_fields(request_type=asset_service )) # verify fields with default values are dropped + assert "constraint" not in jsonified_request - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).search_all_iam_policies._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_org_policies._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + assert "constraint" in jsonified_request + assert jsonified_request["constraint"] == request_init["constraint"] jsonified_request["scope"] = 'scope_value' + jsonified_request["constraint"] = 'constraint_value' - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).search_all_iam_policies._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_org_policies._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("asset_types", "order_by", "page_size", "page_token", "query", )) + assert not set(unset_fields) - set(("constraint", "filter", "page_size", "page_token", )) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "scope" in jsonified_request assert jsonified_request["scope"] == 'scope_value' + assert "constraint" in jsonified_request + assert jsonified_request["constraint"] == 'constraint_value' client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5802,7 +11468,7 @@ def test_search_all_iam_policies_rest_required_fields(request_type=asset_service request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = asset_service.SearchAllIamPoliciesResponse() + return_value = asset_service.AnalyzeOrgPoliciesResponse() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, 'request') as req: # We need to mock transcode() because providing default values @@ -5822,29 +11488,33 @@ def test_search_all_iam_policies_rest_required_fields(request_type=asset_service response_value = Response() response_value.status_code = 200 - pb_return_value = asset_service.SearchAllIamPoliciesResponse.pb(return_value) + pb_return_value = asset_service.AnalyzeOrgPoliciesResponse.pb(return_value) json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value - response = client.search_all_iam_policies(request) + response = client.analyze_org_policies(request) expected_params = [ + ( + "constraint", + "", + ), ] actual_params = req.call_args.kwargs['params'] assert expected_params == actual_params -def test_search_all_iam_policies_rest_unset_required_fields(): +def test_analyze_org_policies_rest_unset_required_fields(): transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - unset_fields = transport.search_all_iam_policies._get_unset_required_fields({}) - assert set(unset_fields) == (set(("assetTypes", "orderBy", "pageSize", "pageToken", "query", )) & set(("scope", ))) + unset_fields = transport.analyze_org_policies._get_unset_required_fields({}) + assert set(unset_fields) == (set(("constraint", "filter", "pageSize", "pageToken", )) & set(("scope", "constraint", ))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_search_all_iam_policies_rest_interceptors(null_interceptor): +def test_analyze_org_policies_rest_interceptors(null_interceptor): transport = transports.AssetServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), @@ -5852,11 +11522,11 @@ def test_search_all_iam_policies_rest_interceptors(null_interceptor): client = AssetServiceClient(transport=transport) with mock.patch.object(type(client.transport._session), "request") as req, \ mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_search_all_iam_policies") as post, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_search_all_iam_policies") as pre: + mock.patch.object(transports.AssetServiceRestInterceptor, "post_analyze_org_policies") as post, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "pre_analyze_org_policies") as pre: pre.assert_not_called() post.assert_not_called() - pb_message = asset_service.SearchAllIamPoliciesRequest.pb(asset_service.SearchAllIamPoliciesRequest()) + pb_message = asset_service.AnalyzeOrgPoliciesRequest.pb(asset_service.AnalyzeOrgPoliciesRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -5867,23 +11537,23 @@ def test_search_all_iam_policies_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = asset_service.SearchAllIamPoliciesResponse.to_json(asset_service.SearchAllIamPoliciesResponse()) + req.return_value._content = asset_service.AnalyzeOrgPoliciesResponse.to_json(asset_service.AnalyzeOrgPoliciesResponse()) - request = asset_service.SearchAllIamPoliciesRequest() + request = asset_service.AnalyzeOrgPoliciesRequest() metadata =[ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata
- post.return_value = asset_service.SearchAllIamPoliciesResponse() + post.return_value = asset_service.AnalyzeOrgPoliciesResponse() - client.search_all_iam_policies(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.analyze_org_policies(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) pre.assert_called_once() post.assert_called_once() -def test_search_all_iam_policies_rest_bad_request(transport: str = 'rest', request_type=asset_service.SearchAllIamPoliciesRequest): +def test_analyze_org_policies_rest_bad_request(transport: str = 'rest', request_type=asset_service.AnalyzeOrgPoliciesRequest): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5900,10 +11570,10 @@ def test_search_all_iam_policies_rest_bad_request(transport: str = 'rest', reque response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.search_all_iam_policies(request) + client.analyze_org_policies(request) -def test_search_all_iam_policies_rest_flattened(): +def test_analyze_org_policies_rest_flattened(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -5912,7 +11582,7 @@ def test_search_all_iam_policies_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), 'request') as req: # Designate an appropriate value for the returned response. - return_value = asset_service.SearchAllIamPoliciesResponse() + return_value = asset_service.AnalyzeOrgPoliciesResponse() # get arguments that satisfy an http rule for this method sample_request = {'scope': 'sample1/sample2'} @@ -5920,28 +11590,29 @@ def test_search_all_iam_policies_rest_flattened(): # get truthy value for each flattened field mock_args = dict( scope='scope_value', - query='query_value', + constraint='constraint_value', + filter='filter_value', ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = asset_service.SearchAllIamPoliciesResponse.pb(return_value) + pb_return_value = asset_service.AnalyzeOrgPoliciesResponse.pb(return_value) json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value - client.search_all_iam_policies(**mock_args) + client.analyze_org_policies(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{scope=*/*}:searchAllIamPolicies" % client.transport._host, args[1]) + assert path_template.validate("%s/v1/{scope=*/*}:analyzeOrgPolicies" % client.transport._host, args[1]) -def test_search_all_iam_policies_rest_flattened_error(transport: str = 'rest'): +def test_analyze_org_policies_rest_flattened_error(transport: str = 'rest'): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5950,14 +11621,15 @@ def test_search_all_iam_policies_rest_flattened_error(transport: str = 'rest'): # Attempting to call a method with both a request object and flattened # fields is an error.
with pytest.raises(ValueError): - client.search_all_iam_policies( - asset_service.SearchAllIamPoliciesRequest(), + client.analyze_org_policies( + asset_service.AnalyzeOrgPoliciesRequest(), scope='scope_value', - query='query_value', + constraint='constraint_value', + filter='filter_value', ) -def test_search_all_iam_policies_rest_pager(transport: str = 'rest'): +def test_analyze_org_policies_rest_pager(transport: str = 'rest'): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5969,28 +11641,28 @@ def test_search_all_iam_policies_rest_pager(transport: str = 'rest'): #with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( - asset_service.SearchAllIamPoliciesResponse( - results=[ - assets.IamPolicySearchResult(), - assets.IamPolicySearchResult(), - assets.IamPolicySearchResult(), + asset_service.AnalyzeOrgPoliciesResponse( + org_policy_results=[ + asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult(), + asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult(), + asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult(), ], next_page_token='abc', ), - asset_service.SearchAllIamPoliciesResponse( - results=[], + asset_service.AnalyzeOrgPoliciesResponse( + org_policy_results=[], next_page_token='def', ), - asset_service.SearchAllIamPoliciesResponse( - results=[ - assets.IamPolicySearchResult(), + asset_service.AnalyzeOrgPoliciesResponse( + org_policy_results=[ + asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult(), ], next_page_token='ghi', ), - asset_service.SearchAllIamPoliciesResponse( - results=[ - assets.IamPolicySearchResult(), - assets.IamPolicySearchResult(), + asset_service.AnalyzeOrgPoliciesResponse( + org_policy_results=[ + asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult(), + asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult(), ], ), ) @@ -5998,7 +11670,7 @@ def test_search_all_iam_policies_rest_pager(transport: str = 'rest'): response = response + response # Wrap the values into proper Response objs - response = tuple(asset_service.SearchAllIamPoliciesResponse.to_json(x) for x in response) + response = tuple(asset_service.AnalyzeOrgPoliciesResponse.to_json(x) for x in response) return_values = tuple(Response() for i in response) for return_val, response_val in zip(return_values, response): return_val._content = response_val.encode('UTF-8') @@ -6007,58 +11679,60 @@ def test_search_all_iam_policies_rest_pager(transport: str = 'rest'): sample_request = {'scope': 'sample1/sample2'} - pager = client.search_all_iam_policies(request=sample_request) + pager = client.analyze_org_policies(request=sample_request) results = list(pager) assert len(results) == 6 - assert all(isinstance(i, assets.IamPolicySearchResult) + assert all(isinstance(i, asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult) for i in results) - pages = list(client.search_all_iam_policies(request=sample_request).pages) + pages = list(client.analyze_org_policies(request=sample_request).pages) for page_, token in zip(pages, ['abc','def','ghi', '']): assert page_.raw_page.next_page_token == token @pytest.mark.parametrize("request_type", [ - asset_service.AnalyzeIamPolicyRequest, + asset_service.AnalyzeOrgPolicyGovernedContainersRequest, dict, ]) -def test_analyze_iam_policy_rest(request_type): +def test_analyze_org_policy_governed_containers_rest(request_type): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", )
# send a request that will satisfy transcoding - request_init = {'analysis_query': {'scope': 'sample1/sample2'}} + request_init = {'scope': 'sample1/sample2'} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), 'request') as req: # Designate an appropriate value for the returned response. - return_value = asset_service.AnalyzeIamPolicyResponse( - fully_explored=True, + return_value = asset_service.AnalyzeOrgPolicyGovernedContainersResponse( + next_page_token='next_page_token_value', ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = asset_service.AnalyzeIamPolicyResponse.pb(return_value) + pb_return_value = asset_service.AnalyzeOrgPolicyGovernedContainersResponse.pb(return_value) json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value - response = client.analyze_iam_policy(request) + response = client.analyze_org_policy_governed_containers(request) # Establish that the response is the type that we expect. - assert isinstance(response, asset_service.AnalyzeIamPolicyResponse) - assert response.fully_explored is True + assert isinstance(response, pagers.AnalyzeOrgPolicyGovernedContainersPager) + assert response.next_page_token == 'next_page_token_value' -def test_analyze_iam_policy_rest_required_fields(request_type=asset_service.AnalyzeIamPolicyRequest): +def test_analyze_org_policy_governed_containers_rest_required_fields(request_type=asset_service.AnalyzeOrgPolicyGovernedContainersRequest): transport_class = transports.AssetServiceRestTransport request_init = {} + request_init["scope"] = "" + request_init["constraint"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads(json_format.MessageToJson( @@ -6068,18 +11742,28 @@ def test_analyze_iam_policy_rest_required_fields(request_type=asset_service.Anal )) # verify fields with default values are dropped + assert "constraint" not in jsonified_request - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_iam_policy._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_org_policy_governed_containers._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + assert "constraint" in jsonified_request + assert jsonified_request["constraint"] == request_init["constraint"] - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request["scope"] = 'scope_value' + jsonified_request["constraint"] = 'constraint_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_org_policy_governed_containers._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in.
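# [Illustrative aside] Two set idioms recur in these required-field tests: the
# difference just below asserts that every still-unset required field is a
# query-string parameter, and the companion *_unset_required_fields test
# intersects the query parameters with the required set. "scope" rides in the
# URL path, so only "constraint" survives that intersection:
example_query_params = {"constraint", "filter", "pageSize", "pageToken"}
example_required = {"scope", "constraint"}
assert example_query_params & example_required == {"constraint"}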
- assert not set(unset_fields) - set(("analysis_query", "execution_timeout", )) + assert not set(unset_fields) - set(("constraint", "filter", "page_size", "page_token", )) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "scope" in jsonified_request + assert jsonified_request["scope"] == 'scope_value' + assert "constraint" in jsonified_request + assert jsonified_request["constraint"] == 'constraint_value' client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6088,7 +11772,7 @@ def test_analyze_iam_policy_rest_required_fields(request_type=asset_service.Anal request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = asset_service.AnalyzeIamPolicyResponse() + return_value = asset_service.AnalyzeOrgPolicyGovernedContainersResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, 'request') as req: # We need to mock transcode() because providing default values @@ -6108,29 +11792,33 @@ def test_analyze_iam_policy_rest_required_fields(request_type=asset_service.Anal response_value = Response() response_value.status_code = 200 - pb_return_value = asset_service.AnalyzeIamPolicyResponse.pb(return_value) + pb_return_value = asset_service.AnalyzeOrgPolicyGovernedContainersResponse.pb(return_value) json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value - response = client.analyze_iam_policy(request) + response = client.analyze_org_policy_governed_containers(request) expected_params = [ + ( + "constraint", + "", + ), ] actual_params = req.call_args.kwargs['params'] assert expected_params == actual_params -def test_analyze_iam_policy_rest_unset_required_fields(): +def test_analyze_org_policy_governed_containers_rest_unset_required_fields(): transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - unset_fields = transport.analyze_iam_policy._get_unset_required_fields({}) - assert set(unset_fields) == (set(("analysisQuery", "executionTimeout", )) & set(("analysisQuery", ))) + unset_fields = transport.analyze_org_policy_governed_containers._get_unset_required_fields({}) + assert set(unset_fields) == (set(("constraint", "filter", "pageSize", "pageToken", )) & set(("scope", "constraint", ))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_analyze_iam_policy_rest_interceptors(null_interceptor): +def test_analyze_org_policy_governed_containers_rest_interceptors(null_interceptor): transport = transports.AssetServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), @@ -6138,11 +11826,11 @@ def test_analyze_iam_policy_rest_interceptors(null_interceptor): client = AssetServiceClient(transport=transport) with mock.patch.object(type(client.transport._session), "request") as req, \ mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_analyze_iam_policy") as post, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_analyze_iam_policy") as pre: + mock.patch.object(transports.AssetServiceRestInterceptor, "post_analyze_org_policy_governed_containers") as post, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "pre_analyze_org_policy_governed_containers") as pre:
pre.assert_not_called() post.assert_not_called() - pb_message = asset_service.AnalyzeIamPolicyRequest.pb(asset_service.AnalyzeIamPolicyRequest()) + pb_message = asset_service.AnalyzeOrgPolicyGovernedContainersRequest.pb(asset_service.AnalyzeOrgPolicyGovernedContainersRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -6153,30 +11841,30 @@ def test_analyze_iam_policy_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = asset_service.AnalyzeIamPolicyResponse.to_json(asset_service.AnalyzeIamPolicyResponse()) + req.return_value._content = asset_service.AnalyzeOrgPolicyGovernedContainersResponse.to_json(asset_service.AnalyzeOrgPolicyGovernedContainersResponse()) - request = asset_service.AnalyzeIamPolicyRequest() + request = asset_service.AnalyzeOrgPolicyGovernedContainersRequest() metadata =[ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = asset_service.AnalyzeIamPolicyResponse() + post.return_value = asset_service.AnalyzeOrgPolicyGovernedContainersResponse() - client.analyze_iam_policy(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.analyze_org_policy_governed_containers(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) pre.assert_called_once() post.assert_called_once() -def test_analyze_iam_policy_rest_bad_request(transport: str = 'rest', request_type=asset_service.AnalyzeIamPolicyRequest): +def test_analyze_org_policy_governed_containers_rest_bad_request(transport: str = 'rest', request_type=asset_service.AnalyzeOrgPolicyGovernedContainersRequest): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # send a request that will satisfy transcoding - request_init = {'analysis_query': {'scope': 'sample1/sample2'}} + request_init = {'scope': 'sample1/sample2'} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -6186,52 +11874,169 @@ def test_analyze_iam_policy_rest_bad_request(transport: str = 'rest', request_ty response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.analyze_iam_policy(request) + client.analyze_org_policy_governed_containers(request) -def test_analyze_iam_policy_rest_error(): +def test_analyze_org_policy_governed_containers_rest_flattened(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest' + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response.
+ return_value = asset_service.AnalyzeOrgPolicyGovernedContainersResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'scope': 'sample1/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + scope='scope_value', + constraint='constraint_value', + filter='filter_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = asset_service.AnalyzeOrgPolicyGovernedContainersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.analyze_org_policy_governed_containers(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{scope=*/*}:analyzeOrgPolicyGovernedContainers" % client.transport._host, args[1]) + + +def test_analyze_org_policy_governed_containers_rest_flattened_error(transport: str = 'rest'): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.analyze_org_policy_governed_containers( + asset_service.AnalyzeOrgPolicyGovernedContainersRequest(), + scope='scope_value', + constraint='constraint_value', + filter='filter_value', + ) + + +def test_analyze_org_policy_governed_containers_rest_pager(transport: str = 'rest'): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
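# [Illustrative aside] The pager built below supports two traversals, and the
# test exercises both: iterating the pager yields GovernedContainer items
# across all pages, while .pages yields one raw response per page. Hedged
# sketch mirroring the test's sample request:
example_request = {"scope": "sample1/sample2"}
for example_item in client.analyze_org_policy_governed_containers(request=example_request):
    pass  # each is a GovernedContainer
for example_page in client.analyze_org_policy_governed_containers(request=example_request).pages:
    pass  # each is an AnalyzeOrgPolicyGovernedContainersResponse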
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + asset_service.AnalyzeOrgPolicyGovernedContainersResponse( + governed_containers=[ + asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer(), + asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer(), + asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer(), + ], + next_page_token='abc', + ), + asset_service.AnalyzeOrgPolicyGovernedContainersResponse( + governed_containers=[], + next_page_token='def', + ), + asset_service.AnalyzeOrgPolicyGovernedContainersResponse( + governed_containers=[ + asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer(), + ], + next_page_token='ghi', + ), + asset_service.AnalyzeOrgPolicyGovernedContainersResponse( + governed_containers=[ + asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer(), + asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(asset_service.AnalyzeOrgPolicyGovernedContainersResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'scope': 'sample1/sample2'} + + pager = client.analyze_org_policy_governed_containers(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer) + for i in results) + + pages = list(client.analyze_org_policy_governed_containers(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + @pytest.mark.parametrize("request_type", [ - asset_service.AnalyzeIamPolicyLongrunningRequest, + asset_service.AnalyzeOrgPolicyGovernedAssetsRequest, dict, ]) -def test_analyze_iam_policy_longrunning_rest(request_type): +def test_analyze_org_policy_governed_assets_rest(request_type): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {'analysis_query': {'scope': 'sample1/sample2'}} + request_init = {'scope': 'sample1/sample2'} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), 'request') as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name='operations/spam') + return_value = asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( + next_page_token='next_page_token_value', + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) + pb_return_value = asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value - response = client.analyze_iam_policy_longrunning(request) + response = client.analyze_org_policy_governed_assets(request) # Establish that the response is the type that we expect. - assert response.operation.name == "operations/spam" + assert isinstance(response, pagers.AnalyzeOrgPolicyGovernedAssetsPager) + assert response.next_page_token == 'next_page_token_value' -def test_analyze_iam_policy_longrunning_rest_required_fields(request_type=asset_service.AnalyzeIamPolicyLongrunningRequest): +def test_analyze_org_policy_governed_assets_rest_required_fields(request_type=asset_service.AnalyzeOrgPolicyGovernedAssetsRequest): transport_class = transports.AssetServiceRestTransport request_init = {} + request_init["scope"] = "" + request_init["constraint"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads(json_format.MessageToJson( @@ -6241,16 +12046,28 @@ def test_analyze_iam_policy_longrunning_rest_required_fields(request_type=asset_ )) # verify fields with default values are dropped + assert "constraint" not in jsonified_request - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_iam_policy_longrunning._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_org_policy_governed_assets._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + assert "constraint" in jsonified_request + assert jsonified_request["constraint"] == request_init["constraint"] - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_iam_policy_longrunning._get_unset_required_fields(jsonified_request) + jsonified_request["scope"] = 'scope_value' + jsonified_request["constraint"] = 'constraint_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_org_policy_governed_assets._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("constraint", "filter", "page_size", "page_token", )) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "scope" in jsonified_request + assert jsonified_request["scope"] == 'scope_value' + assert "constraint" in jsonified_request + assert jsonified_request["constraint"] == 'constraint_value' client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6259,7 +12076,7 @@ def test_analyze_iam_policy_longrunning_rest_required_fields(request_type=asset_ request = request_type(**request_init) # Designate an appropriate value for the returned response. 
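# [Illustrative aside] Note the transcode stub in this rewritten test: method
# "get" and no body, because AnalyzeOrgPolicyGovernedAssets maps to a bodiless
# HTTP GET on /v1/{scope=*/*}:analyzeOrgPolicyGovernedAssets, with "constraint"
# and "filter" travelling as query parameters. Hedged shape of such a result:
example_transcode_result = {
    "uri": "v1/organizations/123:analyzeOrgPolicyGovernedAssets",  # hypothetical
    "method": "get",
    "query_params": {"constraint": "constraints/compute.requireOsLogin"},
}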
- return_value = operations_pb2.Operation(name='operations/spam') + return_value = asset_service.AnalyzeOrgPolicyGovernedAssetsResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, 'request') as req: # We need to mock transcode() because providing default values @@ -6271,36 +12088,41 @@ def test_analyze_iam_policy_longrunning_rest_required_fields(request_type=asset_ pb_request = request_type.pb(request) transcode_result = { 'uri': 'v1/sample_method', - 'method': "post", + 'method': "get", 'query_params': pb_request, } - transcode_result['body'] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) + + pb_return_value = asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value - response = client.analyze_iam_policy_longrunning(request) + response = client.analyze_org_policy_governed_assets(request) expected_params = [ + ( + "constraint", + "", + ), ] actual_params = req.call_args.kwargs['params'] assert expected_params == actual_params -def test_analyze_iam_policy_longrunning_rest_unset_required_fields(): +def test_analyze_org_policy_governed_assets_rest_unset_required_fields(): transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - unset_fields = transport.analyze_iam_policy_longrunning._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("analysisQuery", "outputConfig", ))) + unset_fields = transport.analyze_org_policy_governed_assets._get_unset_required_fields({}) + assert set(unset_fields) == (set(("constraint", "filter", "pageSize", "pageToken", )) & set(("scope", "constraint", ))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_analyze_iam_policy_longrunning_rest_interceptors(null_interceptor): +def test_analyze_org_policy_governed_assets_rest_interceptors(null_interceptor): transport = transports.AssetServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), @@ -6308,12 +12130,11 @@ def test_analyze_iam_policy_longrunning_rest_interceptors(null_interceptor): client = AssetServiceClient(transport=transport) with mock.patch.object(type(client.transport._session), "request") as req, \ mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_analyze_iam_policy_longrunning") as post, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_analyze_iam_policy_longrunning") as pre: + mock.patch.object(transports.AssetServiceRestInterceptor, "post_analyze_org_policy_governed_assets") as post, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "pre_analyze_org_policy_governed_assets") as pre: pre.assert_not_called() post.assert_not_called() - pb_message = asset_service.AnalyzeIamPolicyLongrunningRequest.pb(asset_service.AnalyzeIamPolicyLongrunningRequest()) + pb_message = asset_service.AnalyzeOrgPolicyGovernedAssetsRequest.pb(asset_service.AnalyzeOrgPolicyGovernedAssetsRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -6324,48 +12145,159 @@ def test_analyze_iam_policy_longrunning_rest_interceptors(null_interceptor):
req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value._content = asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.to_json(asset_service.AnalyzeOrgPolicyGovernedAssetsResponse()) - request = asset_service.AnalyzeIamPolicyLongrunningRequest() + request = asset_service.AnalyzeOrgPolicyGovernedAssetsRequest() metadata =[ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = asset_service.AnalyzeOrgPolicyGovernedAssetsResponse() + + client.analyze_org_policy_governed_assets(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_analyze_org_policy_governed_assets_rest_bad_request(transport: str = 'rest', request_type=asset_service.AnalyzeOrgPolicyGovernedAssetsRequest): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'scope': 'sample1/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.analyze_org_policy_governed_assets(request) + + +def test_analyze_org_policy_governed_assets_rest_flattened(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = asset_service.AnalyzeOrgPolicyGovernedAssetsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'scope': 'sample1/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + scope='scope_value', + constraint='constraint_value', + filter='filter_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value - client.analyze_iam_policy_longrunning(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.analyze_org_policy_governed_assets(**mock_args) - pre.assert_called_once() - post.assert_called_once() + # Establish that the underlying call was made with the expected + # request object values.
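# [Illustrative aside] path_template.validate() matches the final URL against
# the http-rule pattern; "organizations/123456789" satisfies the two-segment
# wildcard in {scope=*/*}. Hedged example assuming the default service endpoint:
from google.api_core import path_template as example_path_template

assert example_path_template.validate(
    "https://cloudasset.googleapis.com/v1/{scope=*/*}:analyzeOrgPolicyGovernedAssets",
    "https://cloudasset.googleapis.com/v1/organizations/123456789:analyzeOrgPolicyGovernedAssets",
)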
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{scope=*/*}:analyzeOrgPolicyGovernedAssets" % client.transport._host, args[1]) -def test_analyze_iam_policy_longrunning_rest_bad_request(transport: str = 'rest', request_type=asset_service.AnalyzeIamPolicyLongrunningRequest): +def test_analyze_org_policy_governed_assets_rest_flattened_error(transport: str = 'rest'): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # send a request that will satisfy transcoding - request_init = {'analysis_query': {'scope': 'sample1/sample2'}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.analyze_iam_policy_longrunning(request) + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.analyze_org_policy_governed_assets( + asset_service.AnalyzeOrgPolicyGovernedAssetsRequest(), + scope='scope_value', + constraint='constraint_value', + filter='filter_value', + ) -def test_analyze_iam_policy_longrunning_rest_error(): +def test_analyze_org_policy_governed_assets_rest_pager(transport: str = 'rest'): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest' + transport=transport, ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( + governed_assets=[ + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset(), + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset(), + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset(), + ], + next_page_token='abc', + ), + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( + governed_assets=[], + next_page_token='def', + ), + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( + governed_assets=[ + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset(), + ], + next_page_token='ghi', + ), + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( + governed_assets=[ + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset(), + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'scope': 'sample1/sample2'} + + pager = client.analyze_org_policy_governed_assets(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset) + for i in results) + + pages = list(client.analyze_org_policy_governed_assets(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. 
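# [Illustrative aside] The hunk below registers every newly generated RPC on
# the abstract transport, where each is surfaced as a property that raises
# until a concrete transport overrides it. Hedged sketch of that contract,
# assuming the base transport accepts explicit anonymous credentials:
example_base = transports.AssetServiceTransport(
    credentials=ga_credentials.AnonymousCredentials(),
)
for example_method in ("query_assets", "analyze_org_policies"):
    with pytest.raises(NotImplementedError):
        getattr(example_base, example_method)()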
@@ -6506,6 +12438,18 @@ def test_asset_service_base_transport(): 'search_all_iam_policies', 'analyze_iam_policy', 'analyze_iam_policy_longrunning', + 'analyze_move', + 'query_assets', + 'create_saved_query', + 'get_saved_query', + 'list_saved_queries', + 'update_saved_query', + 'delete_saved_query', + 'batch_get_effective_iam_policies', + 'analyze_org_policies', + 'analyze_org_policy_governed_containers', + 'analyze_org_policy_governed_assets', + 'get_operation', ) for method in methods: with pytest.raises(NotImplementedError): @@ -6802,6 +12746,39 @@ def test_asset_service_client_transport_session_collision(transport_name): session1 = client1.transport.analyze_iam_policy_longrunning._session session2 = client2.transport.analyze_iam_policy_longrunning._session assert session1 != session2 + session1 = client1.transport.analyze_move._session + session2 = client2.transport.analyze_move._session + assert session1 != session2 + session1 = client1.transport.query_assets._session + session2 = client2.transport.query_assets._session + assert session1 != session2 + session1 = client1.transport.create_saved_query._session + session2 = client2.transport.create_saved_query._session + assert session1 != session2 + session1 = client1.transport.get_saved_query._session + session2 = client2.transport.get_saved_query._session + assert session1 != session2 + session1 = client1.transport.list_saved_queries._session + session2 = client2.transport.list_saved_queries._session + assert session1 != session2 + session1 = client1.transport.update_saved_query._session + session2 = client2.transport.update_saved_query._session + assert session1 != session2 + session1 = client1.transport.delete_saved_query._session + session2 = client2.transport.delete_saved_query._session + assert session1 != session2 + session1 = client1.transport.batch_get_effective_iam_policies._session + session2 = client2.transport.batch_get_effective_iam_policies._session + assert session1 != session2 + session1 = client1.transport.analyze_org_policies._session + session2 = client2.transport.analyze_org_policies._session + assert session1 != session2 + session1 = client1.transport.analyze_org_policy_governed_containers._session + session2 = client2.transport.analyze_org_policy_governed_containers._session + assert session1 != session2 + session1 = client1.transport.analyze_org_policy_governed_assets._session + session2 = client2.transport.analyze_org_policy_governed_assets._session + assert session1 != session2 def test_asset_service_grpc_transport_channel(): channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) @@ -6946,6 +12923,42 @@ def test_asset_service_grpc_lro_async_client(): assert transport.operations_client is transport.operations_client +def test_access_level_path(): + access_policy = "squid" + access_level = "clam" + expected = "accessPolicies/{access_policy}/accessLevels/{access_level}".format(access_policy=access_policy, access_level=access_level, ) + actual = AssetServiceClient.access_level_path(access_policy, access_level) + assert expected == actual + + +def test_parse_access_level_path(): + expected = { + "access_policy": "whelk", + "access_level": "octopus", + } + path = AssetServiceClient.access_level_path(**expected) + + # Check that the path construction is reversible. 
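+    # (parse_access_level_path matches the template with a regex and returns
+    # the captured segments as a dict, so for well-formed paths it should be
+    # the exact inverse of access_level_path.)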
+ actual = AssetServiceClient.parse_access_level_path(path) + assert expected == actual + +def test_access_policy_path(): + access_policy = "oyster" + expected = "accessPolicies/{access_policy}".format(access_policy=access_policy, ) + actual = AssetServiceClient.access_policy_path(access_policy) + assert expected == actual + + +def test_parse_access_policy_path(): + expected = { + "access_policy": "nudibranch", + } + path = AssetServiceClient.access_policy_path(**expected) + + # Check that the path construction is reversible. + actual = AssetServiceClient.parse_access_policy_path(path) + assert expected == actual + def test_asset_path(): expected = "*".format() actual = AssetServiceClient.asset_path() @@ -6962,8 +12975,8 @@ def test_parse_asset_path(): assert expected == actual def test_feed_path(): - project = "squid" - feed = "clam" + project = "cuttlefish" + feed = "mussel" expected = "projects/{project}/feeds/{feed}".format(project=project, feed=feed, ) actual = AssetServiceClient.feed_path(project, feed) assert expected == actual @@ -6971,8 +12984,8 @@ def test_feed_path(): def test_parse_feed_path(): expected = { - "project": "whelk", - "feed": "octopus", + "project": "winkle", + "feed": "nautilus", } path = AssetServiceClient.feed_path(**expected) @@ -6980,8 +12993,67 @@ def test_parse_feed_path(): actual = AssetServiceClient.parse_feed_path(path) assert expected == actual +def test_inventory_path(): + project = "scallop" + location = "abalone" + instance = "squid" + expected = "projects/{project}/locations/{location}/instances/{instance}/inventory".format(project=project, location=location, instance=instance, ) + actual = AssetServiceClient.inventory_path(project, location, instance) + assert expected == actual + + +def test_parse_inventory_path(): + expected = { + "project": "clam", + "location": "whelk", + "instance": "octopus", + } + path = AssetServiceClient.inventory_path(**expected) + + # Check that the path construction is reversible. + actual = AssetServiceClient.parse_inventory_path(path) + assert expected == actual + +def test_saved_query_path(): + project = "oyster" + saved_query = "nudibranch" + expected = "projects/{project}/savedQueries/{saved_query}".format(project=project, saved_query=saved_query, ) + actual = AssetServiceClient.saved_query_path(project, saved_query) + assert expected == actual + + +def test_parse_saved_query_path(): + expected = { + "project": "cuttlefish", + "saved_query": "mussel", + } + path = AssetServiceClient.saved_query_path(**expected) + + # Check that the path construction is reversible. + actual = AssetServiceClient.parse_saved_query_path(path) + assert expected == actual + +def test_service_perimeter_path(): + access_policy = "winkle" + service_perimeter = "nautilus" + expected = "accessPolicies/{access_policy}/servicePerimeters/{service_perimeter}".format(access_policy=access_policy, service_perimeter=service_perimeter, ) + actual = AssetServiceClient.service_perimeter_path(access_policy, service_perimeter) + assert expected == actual + + +def test_parse_service_perimeter_path(): + expected = { + "access_policy": "scallop", + "service_perimeter": "abalone", + } + path = AssetServiceClient.service_perimeter_path(**expected) + + # Check that the path construction is reversible. 
+ actual = AssetServiceClient.parse_service_perimeter_path(path) + assert expected == actual + def test_common_billing_account_path(): - billing_account = "oyster" + billing_account = "squid" expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) actual = AssetServiceClient.common_billing_account_path(billing_account) assert expected == actual @@ -6989,7 +13061,7 @@ def test_common_billing_account_path(): def test_parse_common_billing_account_path(): expected = { - "billing_account": "nudibranch", + "billing_account": "clam", } path = AssetServiceClient.common_billing_account_path(**expected) @@ -6998,7 +13070,7 @@ def test_parse_common_billing_account_path(): assert expected == actual def test_common_folder_path(): - folder = "cuttlefish" + folder = "whelk" expected = "folders/{folder}".format(folder=folder, ) actual = AssetServiceClient.common_folder_path(folder) assert expected == actual @@ -7006,7 +13078,7 @@ def test_common_folder_path(): def test_parse_common_folder_path(): expected = { - "folder": "mussel", + "folder": "octopus", } path = AssetServiceClient.common_folder_path(**expected) @@ -7015,7 +13087,7 @@ def test_parse_common_folder_path(): assert expected == actual def test_common_organization_path(): - organization = "winkle" + organization = "oyster" expected = "organizations/{organization}".format(organization=organization, ) actual = AssetServiceClient.common_organization_path(organization) assert expected == actual @@ -7023,7 +13095,7 @@ def test_common_organization_path(): def test_parse_common_organization_path(): expected = { - "organization": "nautilus", + "organization": "nudibranch", } path = AssetServiceClient.common_organization_path(**expected) @@ -7032,7 +13104,7 @@ def test_parse_common_organization_path(): assert expected == actual def test_common_project_path(): - project = "scallop" + project = "cuttlefish" expected = "projects/{project}".format(project=project, ) actual = AssetServiceClient.common_project_path(project) assert expected == actual @@ -7040,7 +13112,7 @@ def test_common_project_path(): def test_parse_common_project_path(): expected = { - "project": "abalone", + "project": "mussel", } path = AssetServiceClient.common_project_path(**expected) @@ -7049,8 +13121,8 @@ def test_parse_common_project_path(): assert expected == actual def test_common_location_path(): - project = "squid" - location = "clam" + project = "winkle" + location = "nautilus" expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) actual = AssetServiceClient.common_location_path(project, location) assert expected == actual @@ -7058,8 +13130,8 @@ def test_common_location_path(): def test_parse_common_location_path(): expected = { - "project": "whelk", - "location": "octopus", + "project": "scallop", + "location": "abalone", } path = AssetServiceClient.common_location_path(**expected) @@ -7098,6 +13170,183 @@ async def test_transport_close_async(): close.assert_called_once() +def test_get_operation_rest_bad_request(transport: str = 'rest', request_type=operations_pb2.GetOperationRequest): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict({'name': 'sample1/sample2/operations/sample3/sample4'}, request) + + # Mock the http request call within the method and fake a BadRequest error. 
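+    # A 400 status on the mocked session should be translated by the REST
+    # transport's error mapping into google.api_core BadRequest, which is
+    # exactly what pytest.raises asserts below.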
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) + +@pytest.mark.parametrize("request_type", [ + operations_pb2.GetOperationRequest, + dict, +]) +def test_get_operation_rest(request_type): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {'name': 'sample1/sample2/operations/sample3/sample4'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_get_operation(transport: str = "grpc"): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc"): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + +def test_get_operation_field_headers(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. 
Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_get_operation_from_dict(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + def test_transport_close(): transports = { "rest": "_session", diff --git a/tests/integration/goldens/credentials/docs/_static/custom.css b/tests/integration/goldens/credentials/docs/_static/custom.css new file mode 100755 index 0000000000..06423be0b5 --- /dev/null +++ b/tests/integration/goldens/credentials/docs/_static/custom.css @@ -0,0 +1,3 @@ +dl.field-list > dt { + min-width: 100px +} diff --git a/tests/integration/goldens/credentials/docs/conf.py b/tests/integration/goldens/credentials/docs/conf.py index 8ab9f3cbcb..760611c76f 100755 --- a/tests/integration/goldens/credentials/docs/conf.py +++ b/tests/integration/goldens/credentials/docs/conf.py @@ -96,7 +96,7 @@ # # This is also used if you do content translation via gettext catalogs. # Usually you set "language" from the command line for these cases. 
-language = None +language = 'en' # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: diff --git a/tests/integration/goldens/credentials/noxfile.py b/tests/integration/goldens/credentials/noxfile.py index a9202ef71b..b4f8b23509 100755 --- a/tests/integration/goldens/credentials/noxfile.py +++ b/tests/integration/goldens/credentials/noxfile.py @@ -134,7 +134,7 @@ def docs(session): """Build the docs for this library.""" session.install("-e", ".") - session.install("sphinx==4.0.1", "alabaster", "recommonmark") + session.install("sphinx==7.0.1", "alabaster", "recommonmark") shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) session.run( diff --git a/tests/integration/goldens/eventarc/docs/_static/custom.css b/tests/integration/goldens/eventarc/docs/_static/custom.css new file mode 100755 index 0000000000..06423be0b5 --- /dev/null +++ b/tests/integration/goldens/eventarc/docs/_static/custom.css @@ -0,0 +1,3 @@ +dl.field-list > dt { + min-width: 100px +} diff --git a/tests/integration/goldens/eventarc/docs/conf.py b/tests/integration/goldens/eventarc/docs/conf.py index 5a1bf0a742..3859f04c4d 100755 --- a/tests/integration/goldens/eventarc/docs/conf.py +++ b/tests/integration/goldens/eventarc/docs/conf.py @@ -96,7 +96,7 @@ # # This is also used if you do content translation via gettext catalogs. # Usually you set "language" from the command line for these cases. -language = None +language = 'en' # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: diff --git a/tests/integration/goldens/eventarc/google/cloud/eventarc/__init__.py b/tests/integration/goldens/eventarc/google/cloud/eventarc/__init__.py index db48ba5fdf..76921a7ee0 100755 --- a/tests/integration/goldens/eventarc/google/cloud/eventarc/__init__.py +++ b/tests/integration/goldens/eventarc/google/cloud/eventarc/__init__.py @@ -21,33 +21,81 @@ from google.cloud.eventarc_v1.services.eventarc.client import EventarcClient from google.cloud.eventarc_v1.services.eventarc.async_client import EventarcAsyncClient +from google.cloud.eventarc_v1.types.channel import Channel +from google.cloud.eventarc_v1.types.channel_connection import ChannelConnection +from google.cloud.eventarc_v1.types.discovery import EventType +from google.cloud.eventarc_v1.types.discovery import FilteringAttribute +from google.cloud.eventarc_v1.types.discovery import Provider +from google.cloud.eventarc_v1.types.eventarc import CreateChannelConnectionRequest +from google.cloud.eventarc_v1.types.eventarc import CreateChannelRequest from google.cloud.eventarc_v1.types.eventarc import CreateTriggerRequest +from google.cloud.eventarc_v1.types.eventarc import DeleteChannelConnectionRequest +from google.cloud.eventarc_v1.types.eventarc import DeleteChannelRequest from google.cloud.eventarc_v1.types.eventarc import DeleteTriggerRequest +from google.cloud.eventarc_v1.types.eventarc import GetChannelConnectionRequest +from google.cloud.eventarc_v1.types.eventarc import GetChannelRequest +from google.cloud.eventarc_v1.types.eventarc import GetGoogleChannelConfigRequest +from google.cloud.eventarc_v1.types.eventarc import GetProviderRequest from google.cloud.eventarc_v1.types.eventarc import GetTriggerRequest +from google.cloud.eventarc_v1.types.eventarc import ListChannelConnectionsRequest +from google.cloud.eventarc_v1.types.eventarc import ListChannelConnectionsResponse +from google.cloud.eventarc_v1.types.eventarc import ListChannelsRequest +from 
google.cloud.eventarc_v1.types.eventarc import ListChannelsResponse +from google.cloud.eventarc_v1.types.eventarc import ListProvidersRequest +from google.cloud.eventarc_v1.types.eventarc import ListProvidersResponse from google.cloud.eventarc_v1.types.eventarc import ListTriggersRequest from google.cloud.eventarc_v1.types.eventarc import ListTriggersResponse from google.cloud.eventarc_v1.types.eventarc import OperationMetadata +from google.cloud.eventarc_v1.types.eventarc import UpdateChannelRequest +from google.cloud.eventarc_v1.types.eventarc import UpdateGoogleChannelConfigRequest from google.cloud.eventarc_v1.types.eventarc import UpdateTriggerRequest +from google.cloud.eventarc_v1.types.google_channel_config import GoogleChannelConfig from google.cloud.eventarc_v1.types.trigger import CloudRun from google.cloud.eventarc_v1.types.trigger import Destination from google.cloud.eventarc_v1.types.trigger import EventFilter +from google.cloud.eventarc_v1.types.trigger import GKE from google.cloud.eventarc_v1.types.trigger import Pubsub +from google.cloud.eventarc_v1.types.trigger import StateCondition from google.cloud.eventarc_v1.types.trigger import Transport from google.cloud.eventarc_v1.types.trigger import Trigger __all__ = ('EventarcClient', 'EventarcAsyncClient', + 'Channel', + 'ChannelConnection', + 'EventType', + 'FilteringAttribute', + 'Provider', + 'CreateChannelConnectionRequest', + 'CreateChannelRequest', 'CreateTriggerRequest', + 'DeleteChannelConnectionRequest', + 'DeleteChannelRequest', 'DeleteTriggerRequest', + 'GetChannelConnectionRequest', + 'GetChannelRequest', + 'GetGoogleChannelConfigRequest', + 'GetProviderRequest', 'GetTriggerRequest', + 'ListChannelConnectionsRequest', + 'ListChannelConnectionsResponse', + 'ListChannelsRequest', + 'ListChannelsResponse', + 'ListProvidersRequest', + 'ListProvidersResponse', 'ListTriggersRequest', 'ListTriggersResponse', 'OperationMetadata', + 'UpdateChannelRequest', + 'UpdateGoogleChannelConfigRequest', 'UpdateTriggerRequest', + 'GoogleChannelConfig', 'CloudRun', 'Destination', 'EventFilter', + 'GKE', 'Pubsub', + 'StateCondition', 'Transport', 'Trigger', ) diff --git a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/__init__.py b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/__init__.py index a9881629c6..5b322f29c5 100755 --- a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/__init__.py +++ b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/__init__.py @@ -21,34 +21,82 @@ from .services.eventarc import EventarcClient from .services.eventarc import EventarcAsyncClient +from .types.channel import Channel +from .types.channel_connection import ChannelConnection +from .types.discovery import EventType +from .types.discovery import FilteringAttribute +from .types.discovery import Provider +from .types.eventarc import CreateChannelConnectionRequest +from .types.eventarc import CreateChannelRequest from .types.eventarc import CreateTriggerRequest +from .types.eventarc import DeleteChannelConnectionRequest +from .types.eventarc import DeleteChannelRequest from .types.eventarc import DeleteTriggerRequest +from .types.eventarc import GetChannelConnectionRequest +from .types.eventarc import GetChannelRequest +from .types.eventarc import GetGoogleChannelConfigRequest +from .types.eventarc import GetProviderRequest from .types.eventarc import GetTriggerRequest +from .types.eventarc import ListChannelConnectionsRequest +from .types.eventarc import ListChannelConnectionsResponse +from .types.eventarc 
import ListChannelsRequest +from .types.eventarc import ListChannelsResponse +from .types.eventarc import ListProvidersRequest +from .types.eventarc import ListProvidersResponse from .types.eventarc import ListTriggersRequest from .types.eventarc import ListTriggersResponse from .types.eventarc import OperationMetadata +from .types.eventarc import UpdateChannelRequest +from .types.eventarc import UpdateGoogleChannelConfigRequest from .types.eventarc import UpdateTriggerRequest +from .types.google_channel_config import GoogleChannelConfig from .types.trigger import CloudRun from .types.trigger import Destination from .types.trigger import EventFilter +from .types.trigger import GKE from .types.trigger import Pubsub +from .types.trigger import StateCondition from .types.trigger import Transport from .types.trigger import Trigger __all__ = ( 'EventarcAsyncClient', +'Channel', +'ChannelConnection', 'CloudRun', +'CreateChannelConnectionRequest', +'CreateChannelRequest', 'CreateTriggerRequest', +'DeleteChannelConnectionRequest', +'DeleteChannelRequest', 'DeleteTriggerRequest', 'Destination', 'EventFilter', +'EventType', 'EventarcClient', +'FilteringAttribute', +'GKE', +'GetChannelConnectionRequest', +'GetChannelRequest', +'GetGoogleChannelConfigRequest', +'GetProviderRequest', 'GetTriggerRequest', +'GoogleChannelConfig', +'ListChannelConnectionsRequest', +'ListChannelConnectionsResponse', +'ListChannelsRequest', +'ListChannelsResponse', +'ListProvidersRequest', +'ListProvidersResponse', 'ListTriggersRequest', 'ListTriggersResponse', 'OperationMetadata', +'Provider', 'Pubsub', +'StateCondition', 'Transport', 'Trigger', +'UpdateChannelRequest', +'UpdateGoogleChannelConfigRequest', 'UpdateTriggerRequest', ) diff --git a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/gapic_metadata.json b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/gapic_metadata.json index f9d6974972..e560553160 100755 --- a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/gapic_metadata.json +++ b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/gapic_metadata.json @@ -10,26 +10,91 @@ "grpc": { "libraryClient": "EventarcClient", "rpcs": { + "CreateChannel": { + "methods": [ + "create_channel" + ] + }, + "CreateChannelConnection": { + "methods": [ + "create_channel_connection" + ] + }, "CreateTrigger": { "methods": [ "create_trigger" ] }, + "DeleteChannel": { + "methods": [ + "delete_channel" + ] + }, + "DeleteChannelConnection": { + "methods": [ + "delete_channel_connection" + ] + }, "DeleteTrigger": { "methods": [ "delete_trigger" ] }, + "GetChannel": { + "methods": [ + "get_channel" + ] + }, + "GetChannelConnection": { + "methods": [ + "get_channel_connection" + ] + }, + "GetGoogleChannelConfig": { + "methods": [ + "get_google_channel_config" + ] + }, + "GetProvider": { + "methods": [ + "get_provider" + ] + }, "GetTrigger": { "methods": [ "get_trigger" ] }, + "ListChannelConnections": { + "methods": [ + "list_channel_connections" + ] + }, + "ListChannels": { + "methods": [ + "list_channels" + ] + }, + "ListProviders": { + "methods": [ + "list_providers" + ] + }, "ListTriggers": { "methods": [ "list_triggers" ] }, + "UpdateChannel": { + "methods": [ + "update_channel" + ] + }, + "UpdateGoogleChannelConfig": { + "methods": [ + "update_google_channel_config" + ] + }, "UpdateTrigger": { "methods": [ "update_trigger" @@ -40,26 +105,91 @@ "grpc-async": { "libraryClient": "EventarcAsyncClient", "rpcs": { + "CreateChannel": { + "methods": [ + "create_channel" + ] + }, + 
"CreateChannelConnection": { + "methods": [ + "create_channel_connection" + ] + }, "CreateTrigger": { "methods": [ "create_trigger" ] }, + "DeleteChannel": { + "methods": [ + "delete_channel" + ] + }, + "DeleteChannelConnection": { + "methods": [ + "delete_channel_connection" + ] + }, "DeleteTrigger": { "methods": [ "delete_trigger" ] }, + "GetChannel": { + "methods": [ + "get_channel" + ] + }, + "GetChannelConnection": { + "methods": [ + "get_channel_connection" + ] + }, + "GetGoogleChannelConfig": { + "methods": [ + "get_google_channel_config" + ] + }, + "GetProvider": { + "methods": [ + "get_provider" + ] + }, "GetTrigger": { "methods": [ "get_trigger" ] }, + "ListChannelConnections": { + "methods": [ + "list_channel_connections" + ] + }, + "ListChannels": { + "methods": [ + "list_channels" + ] + }, + "ListProviders": { + "methods": [ + "list_providers" + ] + }, "ListTriggers": { "methods": [ "list_triggers" ] }, + "UpdateChannel": { + "methods": [ + "update_channel" + ] + }, + "UpdateGoogleChannelConfig": { + "methods": [ + "update_google_channel_config" + ] + }, "UpdateTrigger": { "methods": [ "update_trigger" @@ -70,26 +200,91 @@ "rest": { "libraryClient": "EventarcClient", "rpcs": { + "CreateChannel": { + "methods": [ + "create_channel" + ] + }, + "CreateChannelConnection": { + "methods": [ + "create_channel_connection" + ] + }, "CreateTrigger": { "methods": [ "create_trigger" ] }, + "DeleteChannel": { + "methods": [ + "delete_channel" + ] + }, + "DeleteChannelConnection": { + "methods": [ + "delete_channel_connection" + ] + }, "DeleteTrigger": { "methods": [ "delete_trigger" ] }, + "GetChannel": { + "methods": [ + "get_channel" + ] + }, + "GetChannelConnection": { + "methods": [ + "get_channel_connection" + ] + }, + "GetGoogleChannelConfig": { + "methods": [ + "get_google_channel_config" + ] + }, + "GetProvider": { + "methods": [ + "get_provider" + ] + }, "GetTrigger": { "methods": [ "get_trigger" ] }, + "ListChannelConnections": { + "methods": [ + "list_channel_connections" + ] + }, + "ListChannels": { + "methods": [ + "list_channels" + ] + }, + "ListProviders": { + "methods": [ + "list_providers" + ] + }, "ListTriggers": { "methods": [ "list_triggers" ] }, + "UpdateChannel": { + "methods": [ + "update_channel" + ] + }, + "UpdateGoogleChannelConfig": { + "methods": [ + "update_google_channel_config" + ] + }, "UpdateTrigger": { "methods": [ "update_trigger" diff --git a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py index 77bf35afad..e247bcbc10 100755 --- a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py +++ b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py @@ -35,12 +35,20 @@ from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore from google.cloud.eventarc_v1.services.eventarc import pagers +from google.cloud.eventarc_v1.types import channel +from google.cloud.eventarc_v1.types import channel as gce_channel +from google.cloud.eventarc_v1.types import channel_connection +from google.cloud.eventarc_v1.types import channel_connection as gce_channel_connection +from google.cloud.eventarc_v1.types import discovery from google.cloud.eventarc_v1.types import eventarc +from google.cloud.eventarc_v1.types import google_channel_config +from google.cloud.eventarc_v1.types import google_channel_config as 
gce_google_channel_config from google.cloud.eventarc_v1.types import trigger from google.cloud.eventarc_v1.types import trigger as gce_trigger from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore from .transports.base import EventarcTransport, DEFAULT_CLIENT_INFO @@ -59,12 +67,26 @@ class EventarcAsyncClient: DEFAULT_ENDPOINT = EventarcClient.DEFAULT_ENDPOINT DEFAULT_MTLS_ENDPOINT = EventarcClient.DEFAULT_MTLS_ENDPOINT + channel_path = staticmethod(EventarcClient.channel_path) + parse_channel_path = staticmethod(EventarcClient.parse_channel_path) + channel_connection_path = staticmethod(EventarcClient.channel_connection_path) + parse_channel_connection_path = staticmethod(EventarcClient.parse_channel_connection_path) + cloud_function_path = staticmethod(EventarcClient.cloud_function_path) + parse_cloud_function_path = staticmethod(EventarcClient.parse_cloud_function_path) + crypto_key_path = staticmethod(EventarcClient.crypto_key_path) + parse_crypto_key_path = staticmethod(EventarcClient.parse_crypto_key_path) + google_channel_config_path = staticmethod(EventarcClient.google_channel_config_path) + parse_google_channel_config_path = staticmethod(EventarcClient.parse_google_channel_config_path) + provider_path = staticmethod(EventarcClient.provider_path) + parse_provider_path = staticmethod(EventarcClient.parse_provider_path) service_path = staticmethod(EventarcClient.service_path) parse_service_path = staticmethod(EventarcClient.parse_service_path) service_account_path = staticmethod(EventarcClient.service_account_path) parse_service_account_path = staticmethod(EventarcClient.parse_service_account_path) trigger_path = staticmethod(EventarcClient.trigger_path) parse_trigger_path = staticmethod(EventarcClient.parse_trigger_path) + workflow_path = staticmethod(EventarcClient.workflow_path) + parse_workflow_path = staticmethod(EventarcClient.parse_workflow_path) common_billing_account_path = staticmethod(EventarcClient.common_billing_account_path) parse_common_billing_account_path = staticmethod(EventarcClient.parse_common_billing_account_path) common_folder_path = staticmethod(EventarcClient.common_folder_path) @@ -357,11 +379,10 @@ async def sample_list_triggers(): Returns: google.cloud.eventarc_v1.services.eventarc.pagers.ListTriggersAsyncPager: - The response message for the - ListTriggers method. - Iterating over this object will yield - results and resolve additional pages - automatically. + The response message for the ListTriggers method. + + Iterating over this object will yield results and + resolve additional pages automatically. """ # Create or coerce a protobuf request object. @@ -613,9 +634,9 @@ async def sample_update_trigger(): should not be set. update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): The fields to be updated; only fields explicitly - provided will be updated. If no field mask is provided, - all provided fields in the request will be updated. To - update all fields, provide a field mask of "*". + provided are updated. If no field mask is provided, all + provided fields in the request are updated. To update + all fields, provide a field mask of "*". 
This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this @@ -825,6 +846,2140 @@ async def sample_delete_trigger(): # Done; return the response. return response + async def get_channel(self, + request: Optional[Union[eventarc.GetChannelRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> channel.Channel: + r"""Get a single Channel. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import eventarc_v1 + + async def sample_get_channel(): + # Create a client + client = eventarc_v1.EventarcAsyncClient() + + # Initialize request argument(s) + request = eventarc_v1.GetChannelRequest( + name="name_value", + ) + + # Make the request + response = await client.get_channel(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.eventarc_v1.types.GetChannelRequest, dict]]): + The request object. The request message for the + GetChannel method. + name (:class:`str`): + Required. The name of the channel to + get. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.eventarc_v1.types.Channel: + A representation of the Channel + resource. A Channel is a resource on + which event providers publish their + events. The published events are + delivered through the transport + associated with the channel. Note that a + channel is associated with exactly one + event provider. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = eventarc.GetChannelRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_channel, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def list_channels(self, + request: Optional[Union[eventarc.ListChannelsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListChannelsAsyncPager: + r"""List channels. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import eventarc_v1 + + async def sample_list_channels(): + # Create a client + client = eventarc_v1.EventarcAsyncClient() + + # Initialize request argument(s) + request = eventarc_v1.ListChannelsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_channels(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.eventarc_v1.types.ListChannelsRequest, dict]]): + The request object. The request message for the + ListChannels method. + parent (:class:`str`): + Required. The parent collection to + list channels on. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.eventarc_v1.services.eventarc.pagers.ListChannelsAsyncPager: + The response message for the ListChannels method. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = eventarc.ListChannelsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_channels, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListChannelsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def create_channel(self, + request: Optional[Union[eventarc.CreateChannelRequest, dict]] = None, + *, + parent: Optional[str] = None, + channel: Optional[gce_channel.Channel] = None, + channel_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Create a new channel in a particular project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import eventarc_v1 + + async def sample_create_channel(): + # Create a client + client = eventarc_v1.EventarcAsyncClient() + + # Initialize request argument(s) + channel = eventarc_v1.Channel() + channel.pubsub_topic = "pubsub_topic_value" + channel.name = "name_value" + + request = eventarc_v1.CreateChannelRequest( + parent="parent_value", + channel=channel, + channel_id="channel_id_value", + validate_only=True, + ) + + # Make the request + operation = client.create_channel(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.eventarc_v1.types.CreateChannelRequest, dict]]): + The request object. The request message for the + CreateChannel method. + parent (:class:`str`): + Required. The parent collection in + which to add this channel. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + channel (:class:`google.cloud.eventarc_v1.types.Channel`): + Required. The channel to create. + This corresponds to the ``channel`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + channel_id (:class:`str`): + Required. The user-provided ID to be + assigned to the channel. + + This corresponds to the ``channel_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.eventarc_v1.types.Channel` A representation of the Channel resource. + A Channel is a resource on which event providers + publish their events. The published events are + delivered through the transport associated with the + channel. Note that a channel is associated with + exactly one event provider. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
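+        # (Flattened fields and a full request object are mutually exclusive:
+        # supplying both trips the ValueError below instead of silently
+        # merging the two.)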
+ has_flattened_params = any([parent, channel, channel_id]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = eventarc.CreateChannelRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if channel is not None: + request.channel = channel + if channel_id is not None: + request.channel_id = channel_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_channel_, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + gce_channel.Channel, + metadata_type=eventarc.OperationMetadata, + ) + + # Done; return the response. + return response + + async def update_channel(self, + request: Optional[Union[eventarc.UpdateChannelRequest, dict]] = None, + *, + channel: Optional[gce_channel.Channel] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Update a single channel. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import eventarc_v1 + + async def sample_update_channel(): + # Create a client + client = eventarc_v1.EventarcAsyncClient() + + # Initialize request argument(s) + request = eventarc_v1.UpdateChannelRequest( + validate_only=True, + ) + + # Make the request + operation = client.update_channel(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.eventarc_v1.types.UpdateChannelRequest, dict]]): + The request object. The request message for the + UpdateChannel method. + channel (:class:`google.cloud.eventarc_v1.types.Channel`): + The channel to be updated. + This corresponds to the ``channel`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + The fields to be updated; only fields explicitly + provided are updated. If no field mask is provided, all + provided fields in the request are updated. To update + all fields, provide a field mask of "*". + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.eventarc_v1.types.Channel` A representation of the Channel resource. + A Channel is a resource on which event providers + publish their events. The published events are + delivered through the transport associated with the + channel. Note that a channel is associated with + exactly one event provider. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([channel, update_mask]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = eventarc.UpdateChannelRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if channel is not None: + request.channel = channel + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_channel, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("channel.name", request.channel.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + gce_channel.Channel, + metadata_type=eventarc.OperationMetadata, + ) + + # Done; return the response. + return response + + async def delete_channel(self, + request: Optional[Union[eventarc.DeleteChannelRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Delete a single channel. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import eventarc_v1 + + async def sample_delete_channel(): + # Create a client + client = eventarc_v1.EventarcAsyncClient() + + # Initialize request argument(s) + request = eventarc_v1.DeleteChannelRequest( + name="name_value", + validate_only=True, + ) + + # Make the request + operation = client.delete_channel(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.eventarc_v1.types.DeleteChannelRequest, dict]]): + The request object. The request message for the + DeleteChannel method. + name (:class:`str`): + Required. The name of the channel to + be deleted. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.eventarc_v1.types.Channel` A representation of the Channel resource. + A Channel is a resource on which event providers + publish their events. The published events are + delivered through the transport associated with the + channel. Note that a channel is associated with + exactly one event provider. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = eventarc.DeleteChannelRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_channel, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + channel.Channel, + metadata_type=eventarc.OperationMetadata, + ) + + # Done; return the response. + return response + + async def get_provider(self, + request: Optional[Union[eventarc.GetProviderRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> discovery.Provider: + r"""Get a single Provider. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import eventarc_v1 + + async def sample_get_provider(): + # Create a client + client = eventarc_v1.EventarcAsyncClient() + + # Initialize request argument(s) + request = eventarc_v1.GetProviderRequest( + name="name_value", + ) + + # Make the request + response = await client.get_provider(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.eventarc_v1.types.GetProviderRequest, dict]]): + The request object. The request message for the + GetProvider method. + name (:class:`str`): + Required. The name of the provider to + get. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.eventarc_v1.types.Provider: + A representation of the Provider + resource. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = eventarc.GetProviderRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_provider, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_providers(self, + request: Optional[Union[eventarc.ListProvidersRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListProvidersAsyncPager: + r"""List providers. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import eventarc_v1
+
+            async def sample_list_providers():
+                # Create a client
+                client = eventarc_v1.EventarcAsyncClient()
+
+                # Initialize request argument(s)
+                request = eventarc_v1.ListProvidersRequest(
+                    parent="parent_value",
+                )
+
+                # Make the request
+                page_result = await client.list_providers(request=request)
+
+                # Handle the response
+                async for response in page_result:
+                    print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.eventarc_v1.types.ListProvidersRequest, dict]]):
+                The request object. The request message for the
+                ListProviders method.
+            parent (:class:`str`):
+                Required. The parent of the provider
+                to get.
+
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.eventarc_v1.services.eventarc.pagers.ListProvidersAsyncPager:
+                The response message for the ListProviders method.
+
+                Iterating over this object will yield results and
+                resolve additional pages automatically.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([parent])
+        if request is not None and has_flattened_params:
+            raise ValueError("If the `request` argument is set, then none of "
+                             "the individual field arguments should be set.")
+
+        request = eventarc.ListProvidersRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if parent is not None:
+            request.parent = parent
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = gapic_v1.method_async.wrap_method(
+            self._client._transport.list_providers,
+            default_timeout=None,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("parent", request.parent),
+            )),
+        )
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # This method is paged; wrap the response in a pager, which provides
+        # an `__aiter__` convenience method.
+        response = pagers.ListProvidersAsyncPager(
+            method=rpc,
+            request=request,
+            response=response,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    async def get_channel_connection(self,
+            request: Optional[Union[eventarc.GetChannelConnectionRequest, dict]] = None,
+            *,
+            name: Optional[str] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> channel_connection.ChannelConnection:
+        r"""Get a single ChannelConnection.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import eventarc_v1 + + async def sample_get_channel_connection(): + # Create a client + client = eventarc_v1.EventarcAsyncClient() + + # Initialize request argument(s) + request = eventarc_v1.GetChannelConnectionRequest( + name="name_value", + ) + + # Make the request + response = await client.get_channel_connection(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.eventarc_v1.types.GetChannelConnectionRequest, dict]]): + The request object. The request message for the + GetChannelConnection method. + name (:class:`str`): + Required. The name of the channel + connection to get. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.eventarc_v1.types.ChannelConnection: + A representation of the + ChannelConnection resource. A + ChannelConnection is a resource which + event providers create during the + activation process to establish a + connection between the provider and the + subscriber channel. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = eventarc.GetChannelConnectionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_channel_connection, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_channel_connections(self, + request: Optional[Union[eventarc.ListChannelConnectionsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListChannelConnectionsAsyncPager: + r"""List channel connections. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import eventarc_v1
+
+            async def sample_list_channel_connections():
+                # Create a client
+                client = eventarc_v1.EventarcAsyncClient()
+
+                # Initialize request argument(s)
+                request = eventarc_v1.ListChannelConnectionsRequest(
+                    parent="parent_value",
+                )
+
+                # Make the request
+                page_result = await client.list_channel_connections(request=request)
+
+                # Handle the response
+                async for response in page_result:
+                    print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.eventarc_v1.types.ListChannelConnectionsRequest, dict]]):
+                The request object. The request message for the
+                ListChannelConnections method.
+            parent (:class:`str`):
+                Required. The parent collection from
+                which to list channel connections.
+
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.eventarc_v1.services.eventarc.pagers.ListChannelConnectionsAsyncPager:
+                The response message for the ListChannelConnections
+                method.
+
+                Iterating over this object will yield results and
+                resolve additional pages automatically.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([parent])
+        if request is not None and has_flattened_params:
+            raise ValueError("If the `request` argument is set, then none of "
+                             "the individual field arguments should be set.")
+
+        request = eventarc.ListChannelConnectionsRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if parent is not None:
+            request.parent = parent
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = gapic_v1.method_async.wrap_method(
+            self._client._transport.list_channel_connections,
+            default_timeout=None,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("parent", request.parent),
+            )),
+        )
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # This method is paged; wrap the response in a pager, which provides
+        # an `__aiter__` convenience method.
+        response = pagers.ListChannelConnectionsAsyncPager(
+            method=rpc,
+            request=request,
+            response=response,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+ return response + + async def create_channel_connection(self, + request: Optional[Union[eventarc.CreateChannelConnectionRequest, dict]] = None, + *, + parent: Optional[str] = None, + channel_connection: Optional[gce_channel_connection.ChannelConnection] = None, + channel_connection_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Create a new ChannelConnection in a particular + project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import eventarc_v1 + + async def sample_create_channel_connection(): + # Create a client + client = eventarc_v1.EventarcAsyncClient() + + # Initialize request argument(s) + channel_connection = eventarc_v1.ChannelConnection() + channel_connection.name = "name_value" + channel_connection.channel = "channel_value" + + request = eventarc_v1.CreateChannelConnectionRequest( + parent="parent_value", + channel_connection=channel_connection, + channel_connection_id="channel_connection_id_value", + ) + + # Make the request + operation = client.create_channel_connection(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.eventarc_v1.types.CreateChannelConnectionRequest, dict]]): + The request object. The request message for the + CreateChannelConnection method. + parent (:class:`str`): + Required. The parent collection in + which to add this channel connection. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + channel_connection (:class:`google.cloud.eventarc_v1.types.ChannelConnection`): + Required. Channel connection to + create. + + This corresponds to the ``channel_connection`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + channel_connection_id (:class:`str`): + Required. The user-provided ID to be + assigned to the channel connection. + + This corresponds to the ``channel_connection_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.eventarc_v1.types.ChannelConnection` A representation of the ChannelConnection resource. + A ChannelConnection is a resource which event + providers create during the activation process to + establish a connection between the provider and the + subscriber channel. + + """ + # Create or coerce a protobuf request object. 
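+        # Callers may pass either a full request object or the individual
+        # flattened fields, but not both. A hedged illustration (the resource
+        # names below are placeholder values, not generated output):
+        #
+        #   await client.create_channel_connection(
+        #       parent="projects/my-project/locations/us-central1",
+        #       channel_connection=eventarc_v1.ChannelConnection(
+        #           name="name_value", channel="channel_value"),
+        #       channel_connection_id="my-connection",
+        #   )
+        #
+        # is equivalent to setting the same fields on a
+        # CreateChannelConnectionRequest passed via `request`.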
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, channel_connection, channel_connection_id]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = eventarc.CreateChannelConnectionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if channel_connection is not None: + request.channel_connection = channel_connection + if channel_connection_id is not None: + request.channel_connection_id = channel_connection_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_channel_connection, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + gce_channel_connection.ChannelConnection, + metadata_type=eventarc.OperationMetadata, + ) + + # Done; return the response. + return response + + async def delete_channel_connection(self, + request: Optional[Union[eventarc.DeleteChannelConnectionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Delete a single ChannelConnection. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import eventarc_v1 + + async def sample_delete_channel_connection(): + # Create a client + client = eventarc_v1.EventarcAsyncClient() + + # Initialize request argument(s) + request = eventarc_v1.DeleteChannelConnectionRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_channel_connection(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.eventarc_v1.types.DeleteChannelConnectionRequest, dict]]): + The request object. The request message for the + DeleteChannelConnection method. + name (:class:`str`): + Required. The name of the channel + connection to delete. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.eventarc_v1.types.ChannelConnection` A representation of the ChannelConnection resource. + A ChannelConnection is a resource which event + providers create during the activation process to + establish a connection between the provider and the + subscriber channel. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = eventarc.DeleteChannelConnectionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_channel_connection, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + channel_connection.ChannelConnection, + metadata_type=eventarc.OperationMetadata, + ) + + # Done; return the response. + return response + + async def get_google_channel_config(self, + request: Optional[Union[eventarc.GetGoogleChannelConfigRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> google_channel_config.GoogleChannelConfig: + r"""Get a GoogleChannelConfig + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import eventarc_v1 + + async def sample_get_google_channel_config(): + # Create a client + client = eventarc_v1.EventarcAsyncClient() + + # Initialize request argument(s) + request = eventarc_v1.GetGoogleChannelConfigRequest( + name="name_value", + ) + + # Make the request + response = await client.get_google_channel_config(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.eventarc_v1.types.GetGoogleChannelConfigRequest, dict]]): + The request object. The request message for the + GetGoogleChannelConfig method. + name (:class:`str`): + Required. The name of the config to + get. 
+ + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.eventarc_v1.types.GoogleChannelConfig: + A GoogleChannelConfig is a resource + that stores the custom settings + respected by Eventarc first-party + triggers in the matching region. Once + configured, first-party event data will + be protected using the specified custom + managed encryption key instead of + Google-managed encryption keys. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = eventarc.GetGoogleChannelConfigRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_google_channel_config, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_google_channel_config(self, + request: Optional[Union[eventarc.UpdateGoogleChannelConfigRequest, dict]] = None, + *, + google_channel_config: Optional[gce_google_channel_config.GoogleChannelConfig] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gce_google_channel_config.GoogleChannelConfig: + r"""Update a single GoogleChannelConfig + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import eventarc_v1 + + async def sample_update_google_channel_config(): + # Create a client + client = eventarc_v1.EventarcAsyncClient() + + # Initialize request argument(s) + google_channel_config = eventarc_v1.GoogleChannelConfig() + google_channel_config.name = "name_value" + + request = eventarc_v1.UpdateGoogleChannelConfigRequest( + google_channel_config=google_channel_config, + ) + + # Make the request + response = await client.update_google_channel_config(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.eventarc_v1.types.UpdateGoogleChannelConfigRequest, dict]]): + The request object. The request message for the + UpdateGoogleChannelConfig method. + google_channel_config (:class:`google.cloud.eventarc_v1.types.GoogleChannelConfig`): + Required. The config to be updated. + This corresponds to the ``google_channel_config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + The fields to be updated; only fields explicitly + provided are updated. If no field mask is provided, all + provided fields in the request are updated. To update + all fields, provide a field mask of "*". + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.eventarc_v1.types.GoogleChannelConfig: + A GoogleChannelConfig is a resource + that stores the custom settings + respected by Eventarc first-party + triggers in the matching region. Once + configured, first-party event data will + be protected using the specified custom + managed encryption key instead of + Google-managed encryption keys. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([google_channel_config, update_mask]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = eventarc.UpdateGoogleChannelConfigRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if google_channel_config is not None: + request.google_channel_config = google_channel_config + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_google_channel_config, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("google_channel_config.name", request.google_channel_config.name), + )), + ) + + # Send the request. 
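+        # The routing header assembled above is sent as an
+        # "x-goog-request-params" metadata entry (here,
+        # "google_channel_config.name=<resource name>"), which the service
+        # uses to route the request to the correct regional backend.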
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + async def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.delete_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. 
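+        # Cancellation is best-effort (see the docstring above). On success the
+        # operation is not deleted; it completes with an error code of
+        # google.rpc.Code.CANCELLED, which a caller can observe by polling
+        # get_operation (illustrative note, not generated output).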
+        await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+    async def set_iam_policy(
+        self,
+        request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None,
+        *,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> policy_pb2.Policy:
+        r"""Sets the IAM access control policy on the specified function.
+
+        Replaces any existing policy.
+
+        Args:
+            request (:class:`~.iam_policy_pb2.SetIamPolicyRequest`):
+                The request object. Request message for `SetIamPolicy`
+                method.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+        Returns:
+            ~.policy_pb2.Policy:
+                Defines an Identity and Access Management (IAM) policy.
+                It is used to specify access control policies for Cloud
+                Platform resources.
+                A ``Policy`` is a collection of ``bindings``. A
+                ``binding`` binds one or more ``members`` to a single
+                ``role``. Members can be user accounts, service
+                accounts, Google groups, and domains (such as G Suite).
+                A ``role`` is a named list of permissions (defined by
+                IAM or configured by users). A ``binding`` can
+                optionally specify a ``condition``, which is a logic
+                expression that further constrains the role binding
+                based on attributes about the request and/or target
+                resource.
+
+                **JSON Example**
+
+                ::
+
+                    {
+                      "bindings": [
+                        {
+                          "role": "roles/resourcemanager.organizationAdmin",
+                          "members": [
+                            "user:mike@example.com",
+                            "group:admins@example.com",
+                            "domain:google.com",
+                            "serviceAccount:my-project-id@appspot.gserviceaccount.com"
+                          ]
+                        },
+                        {
+                          "role": "roles/resourcemanager.organizationViewer",
+                          "members": ["user:eve@example.com"],
+                          "condition": {
+                            "title": "expirable access",
+                            "description": "Does not grant access after Sep 2020",
+                            "expression": "request.time <
+                            timestamp('2020-10-01T00:00:00.000Z')",
+                          }
+                        }
+                      ]
+                    }
+
+                **YAML Example**
+
+                ::
+
+                    bindings:
+                    - members:
+                      - user:mike@example.com
+                      - group:admins@example.com
+                      - domain:google.com
+                      - serviceAccount:my-project-id@appspot.gserviceaccount.com
+                      role: roles/resourcemanager.organizationAdmin
+                    - members:
+                      - user:eve@example.com
+                      role: roles/resourcemanager.organizationViewer
+                      condition:
+                        title: expirable access
+                        description: Does not grant access after Sep 2020
+                        expression: request.time < timestamp('2020-10-01T00:00:00.000Z')
+
+                For a description of IAM and its features, see the `IAM
+                developer's
+                guide <https://cloud.google.com/iam/docs>`__.
+        """
+        # Create or coerce a protobuf request object.
+
+        # The request isn't a proto-plus wrapped type,
+        # so it must be constructed via keyword expansion.
+        if isinstance(request, dict):
+            request = iam_policy_pb2.SetIamPolicyRequest(**request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = gapic_v1.method.wrap_method(
+            self._client._transport.set_iam_policy,
+            default_timeout=None,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata(
+                (("resource", request.resource),)),
+        )
+
+        # Send the request.
+        response = await rpc(
+            request, retry=retry, timeout=timeout, metadata=metadata,)
+
+        # Done; return the response.
+        return response
+
+    async def get_iam_policy(
+        self,
+        request: Optional[iam_policy_pb2.GetIamPolicyRequest] = None,
+        *,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> policy_pb2.Policy:
+        r"""Gets the IAM access control policy for a function.
+
+        Returns an empty policy if the function exists and does not have a
+        policy set.
+
+        Args:
+            request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`):
+                The request object. Request message for `GetIamPolicy`
+                method.
+            retry (google.api_core.retry.Retry): Designation of what errors, if
+                any, should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+        Returns:
+            ~.policy_pb2.Policy:
+                Defines an Identity and Access Management (IAM) policy.
+                It is used to specify access control policies for Cloud
+                Platform resources.
+                A ``Policy`` is a collection of ``bindings``. A
+                ``binding`` binds one or more ``members`` to a single
+                ``role``. Members can be user accounts, service
+                accounts, Google groups, and domains (such as G Suite).
+                A ``role`` is a named list of permissions (defined by
+                IAM or configured by users). A ``binding`` can
+                optionally specify a ``condition``, which is a logic
+                expression that further constrains the role binding
+                based on attributes about the request and/or target
+                resource.
+
+                **JSON Example**
+
+                ::
+
+                    {
+                      "bindings": [
+                        {
+                          "role": "roles/resourcemanager.organizationAdmin",
+                          "members": [
+                            "user:mike@example.com",
+                            "group:admins@example.com",
+                            "domain:google.com",
+                            "serviceAccount:my-project-id@appspot.gserviceaccount.com"
+                          ]
+                        },
+                        {
+                          "role": "roles/resourcemanager.organizationViewer",
+                          "members": ["user:eve@example.com"],
+                          "condition": {
+                            "title": "expirable access",
+                            "description": "Does not grant access after Sep 2020",
+                            "expression": "request.time <
+                            timestamp('2020-10-01T00:00:00.000Z')",
+                          }
+                        }
+                      ]
+                    }
+
+                **YAML Example**
+
+                ::
+
+                    bindings:
+                    - members:
+                      - user:mike@example.com
+                      - group:admins@example.com
+                      - domain:google.com
+                      - serviceAccount:my-project-id@appspot.gserviceaccount.com
+                      role: roles/resourcemanager.organizationAdmin
+                    - members:
+                      - user:eve@example.com
+                      role: roles/resourcemanager.organizationViewer
+                      condition:
+                        title: expirable access
+                        description: Does not grant access after Sep 2020
+                        expression: request.time < timestamp('2020-10-01T00:00:00.000Z')
+
+                For a description of IAM and its features, see the `IAM
+                developer's
+                guide <https://cloud.google.com/iam/docs>`__.
+        """
+        # Create or coerce a protobuf request object.
+
+        # The request isn't a proto-plus wrapped type,
+        # so it must be constructed via keyword expansion.
+        if isinstance(request, dict):
+            request = iam_policy_pb2.GetIamPolicyRequest(**request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = gapic_v1.method.wrap_method(
+            self._client._transport.get_iam_policy,
+            default_timeout=None,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata(
+                (("resource", request.resource),)),
+        )
+
+        # Send the request.
+        response = await rpc(
+            request, retry=retry, timeout=timeout, metadata=metadata,)
+
+        # Done; return the response.
+ return response + + async def test_iam_permissions( + self, + request: Optional[iam_policy_pb2.TestIamPermissionsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Tests the specified IAM permissions against the IAM access control + policy for a function. + + If the function does not exist, this will return an empty set + of permissions, not a NOT_FOUND error. + + Args: + request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`): + The request object. Request message for + `TestIamPermissions` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.iam_policy_pb2.TestIamPermissionsResponse: + Response message for ``TestIamPermissions`` method. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.TestIamPermissionsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.test_iam_permissions, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("resource", request.resource),)), + ) + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.get_location, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + async def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.list_locations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + async def __aenter__(self) -> "EventarcAsyncClient": return self diff --git a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py index 0049fbba67..e4f83e706f 100755 --- a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py +++ b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py @@ -38,12 +38,20 @@ from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore from google.cloud.eventarc_v1.services.eventarc import pagers +from google.cloud.eventarc_v1.types import channel +from google.cloud.eventarc_v1.types import channel as gce_channel +from google.cloud.eventarc_v1.types import channel_connection +from google.cloud.eventarc_v1.types import channel_connection as gce_channel_connection +from google.cloud.eventarc_v1.types import discovery from google.cloud.eventarc_v1.types import eventarc +from google.cloud.eventarc_v1.types import google_channel_config +from google.cloud.eventarc_v1.types import google_channel_config as gce_google_channel_config from google.cloud.eventarc_v1.types import trigger from google.cloud.eventarc_v1.types import trigger as gce_trigger from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore from .transports.base import EventarcTransport, DEFAULT_CLIENT_INFO @@ -174,6 +182,72 @@ def transport(self) -> EventarcTransport: """ return 
self._transport

+    @staticmethod
+    def channel_path(project: str,location: str,channel: str,) -> str:
+        """Returns a fully-qualified channel string."""
+        return "projects/{project}/locations/{location}/channels/{channel}".format(project=project, location=location, channel=channel, )
+
+    @staticmethod
+    def parse_channel_path(path: str) -> Dict[str,str]:
+        """Parses a channel path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/channels/(?P<channel>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def channel_connection_path(project: str,location: str,channel_connection: str,) -> str:
+        """Returns a fully-qualified channel_connection string."""
+        return "projects/{project}/locations/{location}/channelConnections/{channel_connection}".format(project=project, location=location, channel_connection=channel_connection, )
+
+    @staticmethod
+    def parse_channel_connection_path(path: str) -> Dict[str,str]:
+        """Parses a channel_connection path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/channelConnections/(?P<channel_connection>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def cloud_function_path(project: str,location: str,function: str,) -> str:
+        """Returns a fully-qualified cloud_function string."""
+        return "projects/{project}/locations/{location}/functions/{function}".format(project=project, location=location, function=function, )
+
+    @staticmethod
+    def parse_cloud_function_path(path: str) -> Dict[str,str]:
+        """Parses a cloud_function path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/functions/(?P<function>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def crypto_key_path(project: str,location: str,key_ring: str,crypto_key: str,) -> str:
+        """Returns a fully-qualified crypto_key string."""
+        return "projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}".format(project=project, location=location, key_ring=key_ring, crypto_key=crypto_key, )
+
+    @staticmethod
+    def parse_crypto_key_path(path: str) -> Dict[str,str]:
+        """Parses a crypto_key path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/keyRings/(?P<key_ring>.+?)/cryptoKeys/(?P<crypto_key>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def google_channel_config_path(project: str,location: str,) -> str:
+        """Returns a fully-qualified google_channel_config string."""
+        return "projects/{project}/locations/{location}/googleChannelConfig".format(project=project, location=location, )
+
+    @staticmethod
+    def parse_google_channel_config_path(path: str) -> Dict[str,str]:
+        """Parses a google_channel_config path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/googleChannelConfig$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def provider_path(project: str,location: str,provider: str,) -> str:
+        """Returns a fully-qualified provider string."""
+        return "projects/{project}/locations/{location}/providers/{provider}".format(project=project, location=location, provider=provider, )
+
+    @staticmethod
+    def parse_provider_path(path: str) -> Dict[str,str]:
+        """Parses a provider path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/providers/(?P<provider>.+?)$", path)
+        return m.groupdict() if m else {}
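+
+    # Illustrative round-trip of the helpers above (example values only, not
+    # part of the generated surface):
+    #
+    #   path = EventarcClient.channel_path("my-project", "us-central1", "my-channel")
+    #   # -> "projects/my-project/locations/us-central1/channels/my-channel"
+    #   EventarcClient.parse_channel_path(path)
+    #   # -> {"project": "my-project", "location": "us-central1", "channel": "my-channel"}
+    #
+    # The parse_* helpers return {} when the path does not match the pattern.
+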
     @staticmethod
     def service_path() -> str:
         """Returns a fully-qualified service string."""
@@ -207,6 +281,17 @@ def parse_trigger_path(path: str) -> Dict[str,str]:
         m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/triggers/(?P<trigger>.+?)$", path)
         return m.groupdict() if m else {}
 
+    @staticmethod
+    def workflow_path(project: str,location: str,workflow: str,) -> str:
+        """Returns a fully-qualified workflow string."""
+        return "projects/{project}/locations/{location}/workflows/{workflow}".format(project=project, location=location, workflow=workflow, )
+
+    @staticmethod
+    def parse_workflow_path(path: str) -> Dict[str,str]:
+        """Parses a workflow path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/workflows/(?P<workflow>.+?)$", path)
+        return m.groupdict() if m else {}
+
     @staticmethod
     def common_billing_account_path(billing_account: str, ) -> str:
         """Returns a fully-qualified billing_account string."""
@@ -569,11 +654,10 @@ def sample_list_triggers():
         Returns:
             google.cloud.eventarc_v1.services.eventarc.pagers.ListTriggersPager:
-                The response message for the
-                ListTriggers method.
-                Iterating over this object will yield
-                results and resolve additional pages
-                automatically.
+                The response message for the ListTriggers method.
+
+                Iterating over this object will yield results and
+                resolve additional pages automatically.
 
         """
         # Create or coerce a protobuf request object.
@@ -825,9 +909,9 @@ def sample_update_trigger():
                 should not be set.
             update_mask (google.protobuf.field_mask_pb2.FieldMask):
                 The fields to be updated; only fields explicitly
-                provided will be updated. If no field mask is provided,
-                all provided fields in the request will be updated. To
-                update all fields, provide a field mask of "*".
+                provided are updated. If no field mask is provided, all
+                provided fields in the request are updated. To update
+                all fields, provide a field mask of "*".
 
                 This corresponds to the ``update_mask`` field
                 on the ``request`` instance; if ``request`` is provided, this
@@ -1037,26 +1121,2152 @@ def sample_delete_trigger():
         # Done; return the response.
         return response
 
-    def __enter__(self) -> "EventarcClient":
-        return self
+    def get_channel(self,
+            request: Optional[Union[eventarc.GetChannelRequest, dict]] = None,
+            *,
+            name: Optional[str] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> channel.Channel:
+        r"""Get a single Channel.
 
-    def __exit__(self, type, value, traceback):
-        """Releases underlying transport's resources.
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import eventarc_v1
+
+            def sample_get_channel():
+                # Create a client
+                client = eventarc_v1.EventarcClient()
+
+                # Initialize request argument(s)
+                request = eventarc_v1.GetChannelRequest(
+                    name="name_value",
+                )
+
+                # Make the request
+                response = client.get_channel(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.cloud.eventarc_v1.types.GetChannelRequest, dict]):
+                The request object. The request message for the
+                GetChannel method.
+            name (str):
+                Required. The name of the channel to
+                get.
+
+                This corresponds to the ``name`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.eventarc_v1.types.Channel: + A representation of the Channel + resource. A Channel is a resource on + which event providers publish their + events. The published events are + delivered through the transport + associated with the channel. Note that a + channel is associated with exactly one + event provider. - .. warning:: - ONLY use as a context manager if the transport is NOT shared - with other clients! Exiting the with block will CLOSE the transport - and may cause errors in other clients! """ - self.transport.close() + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a eventarc.GetChannelRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, eventarc.GetChannelRequest): + request = eventarc.GetChannelRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_channel] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_channels(self, + request: Optional[Union[eventarc.ListChannelsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListChannelsPager: + r"""List channels. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import eventarc_v1 + + def sample_list_channels(): + # Create a client + client = eventarc_v1.EventarcClient() + + # Initialize request argument(s) + request = eventarc_v1.ListChannelsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_channels(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.eventarc_v1.types.ListChannelsRequest, dict]): + The request object. The request message for the + ListChannels method. + parent (str): + Required. 
The parent collection to + list channels on. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.eventarc_v1.services.eventarc.pagers.ListChannelsPager: + The response message for the ListChannels method. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a eventarc.ListChannelsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, eventarc.ListChannelsRequest): + request = eventarc.ListChannelsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_channels] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListChannelsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_channel(self, + request: Optional[Union[eventarc.CreateChannelRequest, dict]] = None, + *, + parent: Optional[str] = None, + channel: Optional[gce_channel.Channel] = None, + channel_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Create a new channel in a particular project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import eventarc_v1 + + def sample_create_channel(): + # Create a client + client = eventarc_v1.EventarcClient() + + # Initialize request argument(s) + channel = eventarc_v1.Channel() + channel.pubsub_topic = "pubsub_topic_value" + channel.name = "name_value" + + request = eventarc_v1.CreateChannelRequest( + parent="parent_value", + channel=channel, + channel_id="channel_id_value", + validate_only=True, + ) + + # Make the request + operation = client.create_channel(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.eventarc_v1.types.CreateChannelRequest, dict]): + The request object. The request message for the + CreateChannel method. + parent (str): + Required. The parent collection in + which to add this channel. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + channel (google.cloud.eventarc_v1.types.Channel): + Required. The channel to create. + This corresponds to the ``channel`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + channel_id (str): + Required. The user-provided ID to be + assigned to the channel. + + This corresponds to the ``channel_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.eventarc_v1.types.Channel` A representation of the Channel resource. + A Channel is a resource on which event providers + publish their events. The published events are + delivered through the transport associated with the + channel. Note that a channel is associated with + exactly one event provider. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, channel, channel_id]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a eventarc.CreateChannelRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, eventarc.CreateChannelRequest): + request = eventarc.CreateChannelRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if channel is not None: + request.channel = channel + if channel_id is not None: + request.channel_id = channel_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling.
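[Reviewer note, not part of the generated diff] Because every RPC here is looked up through the transport's wrapped methods, callers can override the configured retry and timeout on a per-call basis. A minimal sketch; the project and channel names below are hypothetical:

.. code-block:: python

    # Sketch only: per-call retry/timeout overrides on a generated method.
    from google.api_core import retry as retries
    from google.cloud import eventarc_v1

    client = eventarc_v1.EventarcClient()
    channel = client.get_channel(
        name="projects/my-project/locations/us-central1/channels/my-channel",
        retry=retries.Retry(initial=0.25, maximum=8.0, multiplier=2.0),
        timeout=30.0,
    )
    print(channel.name)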
+ rpc = self._transport._wrapped_methods[self._transport.create_channel_] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + gce_channel.Channel, + metadata_type=eventarc.OperationMetadata, + ) + + # Done; return the response. + return response + + def update_channel(self, + request: Optional[Union[eventarc.UpdateChannelRequest, dict]] = None, + *, + channel: Optional[gce_channel.Channel] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Update a single channel. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import eventarc_v1 + + def sample_update_channel(): + # Create a client + client = eventarc_v1.EventarcClient() + + # Initialize request argument(s) + request = eventarc_v1.UpdateChannelRequest( + validate_only=True, + ) + + # Make the request + operation = client.update_channel(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.eventarc_v1.types.UpdateChannelRequest, dict]): + The request object. The request message for the + UpdateChannel method. + channel (google.cloud.eventarc_v1.types.Channel): + The channel to be updated. + This corresponds to the ``channel`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + The fields to be updated; only fields explicitly + provided are updated. If no field mask is provided, all + provided fields in the request are updated. To update + all fields, provide a field mask of "*". + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.eventarc_v1.types.Channel` A representation of the Channel resource. + A Channel is a resource on which event providers + publish their events. The published events are + delivered through the transport associated with the + channel. Note that a channel is associated with + exactly one event provider. + + """ + # Create or coerce a protobuf request object. 
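[Reviewer note, not part of the generated diff] Since `update_channel` returns a long-running operation and honors an explicit field mask, a hedged usage sketch may help while reviewing; the channel name and topic are made up:

.. code-block:: python

    # Sketch only: update a single field via FieldMask, then block on the LRO.
    from google.protobuf import field_mask_pb2
    from google.cloud import eventarc_v1

    client = eventarc_v1.EventarcClient()
    channel = eventarc_v1.Channel(
        name="projects/my-project/locations/us-central1/channels/my-channel",
        pubsub_topic="projects/my-project/topics/my-topic",
    )
    operation = client.update_channel(
        channel=channel,
        update_mask=field_mask_pb2.FieldMask(paths=["pubsub_topic"]),
    )
    updated = operation.result(timeout=300)  # resolves to eventarc_v1.Channel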
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([channel, update_mask]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a eventarc.UpdateChannelRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, eventarc.UpdateChannelRequest): + request = eventarc.UpdateChannelRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if channel is not None: + request.channel = channel + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_channel] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("channel.name", request.channel.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + gce_channel.Channel, + metadata_type=eventarc.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_channel(self, + request: Optional[Union[eventarc.DeleteChannelRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Delete a single channel. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import eventarc_v1 + + def sample_delete_channel(): + # Create a client + client = eventarc_v1.EventarcClient() + + # Initialize request argument(s) + request = eventarc_v1.DeleteChannelRequest( + name="name_value", + validate_only=True, + ) + + # Make the request + operation = client.delete_channel(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.eventarc_v1.types.DeleteChannelRequest, dict]): + The request object. The request message for the + DeleteChannel method. + name (str): + Required. The name of the channel to + be deleted. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.eventarc_v1.types.Channel` A representation of the Channel resource. + A Channel is a resource on which event providers + publish their events. The published events are + delivered through the transport associated with the + channel. Note that a channel is associated with + exactly one event provider. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a eventarc.DeleteChannelRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, eventarc.DeleteChannelRequest): + request = eventarc.DeleteChannelRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_channel] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + channel.Channel, + metadata_type=eventarc.OperationMetadata, + ) + + # Done; return the response. + return response + + def get_provider(self, + request: Optional[Union[eventarc.GetProviderRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> discovery.Provider: + r"""Get a single Provider. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import eventarc_v1 + + def sample_get_provider(): + # Create a client + client = eventarc_v1.EventarcClient() + + # Initialize request argument(s) + request = eventarc_v1.GetProviderRequest( + name="name_value", + ) + + # Make the request + response = client.get_provider(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.eventarc_v1.types.GetProviderRequest, dict]): + The request object. The request message for the + GetProvider method. + name (str): + Required. The name of the provider to + get. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.eventarc_v1.types.Provider: + A representation of the Provider + resource. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a eventarc.GetProviderRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, eventarc.GetProviderRequest): + request = eventarc.GetProviderRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_provider] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_providers(self, + request: Optional[Union[eventarc.ListProvidersRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListProvidersPager: + r"""List providers. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import eventarc_v1 + + def sample_list_providers(): + # Create a client + client = eventarc_v1.EventarcClient() + + # Initialize request argument(s) + request = eventarc_v1.ListProvidersRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_providers(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.eventarc_v1.types.ListProvidersRequest, dict]): + The request object. The request message for the + ListProviders method. + parent (str): + Required. The parent of the provider + to get. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.eventarc_v1.services.eventarc.pagers.ListProvidersPager: + The response message for the ListProviders method. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a eventarc.ListProvidersRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, eventarc.ListProvidersRequest): + request = eventarc.ListProvidersRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_providers] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListProvidersPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_channel_connection(self, + request: Optional[Union[eventarc.GetChannelConnectionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> channel_connection.ChannelConnection: + r"""Get a single ChannelConnection. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import eventarc_v1 + + def sample_get_channel_connection(): + # Create a client + client = eventarc_v1.EventarcClient() + + # Initialize request argument(s) + request = eventarc_v1.GetChannelConnectionRequest( + name="name_value", + ) + + # Make the request + response = client.get_channel_connection(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.eventarc_v1.types.GetChannelConnectionRequest, dict]): + The request object. The request message for the + GetChannelConnection method. + name (str): + Required. The name of the channel + connection to get. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.eventarc_v1.types.ChannelConnection: + A representation of the + ChannelConnection resource. A + ChannelConnection is a resource which + event providers create during the + activation process to establish a + connection between the provider and the + subscriber channel. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a eventarc.GetChannelConnectionRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, eventarc.GetChannelConnectionRequest): + request = eventarc.GetChannelConnectionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_channel_connection] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_channel_connections(self, + request: Optional[Union[eventarc.ListChannelConnectionsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListChannelConnectionsPager: + r"""List channel connections. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import eventarc_v1 + + def sample_list_channel_connections(): + # Create a client + client = eventarc_v1.EventarcClient() + + # Initialize request argument(s) + request = eventarc_v1.ListChannelConnectionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_channel_connections(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.eventarc_v1.types.ListChannelConnectionsRequest, dict]): + The request object. The request message for the + ListChannelConnections method. + parent (str): + Required. The parent collection from + which to list channel connections. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.eventarc_v1.services.eventarc.pagers.ListChannelConnectionsPager: + The response message for the ListChannelConnections + method. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a eventarc.ListChannelConnectionsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, eventarc.ListChannelConnectionsRequest): + request = eventarc.ListChannelConnectionsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_channel_connections] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListChannelConnectionsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_channel_connection(self, + request: Optional[Union[eventarc.CreateChannelConnectionRequest, dict]] = None, + *, + parent: Optional[str] = None, + channel_connection: Optional[gce_channel_connection.ChannelConnection] = None, + channel_connection_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Create a new ChannelConnection in a particular + project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import eventarc_v1 + + def sample_create_channel_connection(): + # Create a client + client = eventarc_v1.EventarcClient() + + # Initialize request argument(s) + channel_connection = eventarc_v1.ChannelConnection() + channel_connection.name = "name_value" + channel_connection.channel = "channel_value" + + request = eventarc_v1.CreateChannelConnectionRequest( + parent="parent_value", + channel_connection=channel_connection, + channel_connection_id="channel_connection_id_value", + ) + + # Make the request + operation = client.create_channel_connection(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.eventarc_v1.types.CreateChannelConnectionRequest, dict]): + The request object. The request message for the + CreateChannelConnection method. + parent (str): + Required. The parent collection in + which to add this channel connection. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + channel_connection (google.cloud.eventarc_v1.types.ChannelConnection): + Required. Channel connection to + create. + + This corresponds to the ``channel_connection`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + channel_connection_id (str): + Required. The user-provided ID to be + assigned to the channel connection. + + This corresponds to the ``channel_connection_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.eventarc_v1.types.ChannelConnection` A representation of the ChannelConnection resource. + A ChannelConnection is a resource which event + providers create during the activation process to + establish a connection between the provider and the + subscriber channel. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, channel_connection, channel_connection_id]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a eventarc.CreateChannelConnectionRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, eventarc.CreateChannelConnectionRequest): + request = eventarc.CreateChannelConnectionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
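[Reviewer note, not part of the generated diff] The pagers returned by the `list_*` methods in this diff (`list_channels`, `list_providers`, `list_channel_connections` just above) resolve pages lazily; besides the element-wise iteration shown in the generated snippets, callers can walk page boundaries explicitly. A sketch with an assumed parent path:

.. code-block:: python

    # Sketch only: explicit page-by-page iteration over a generated pager.
    from google.cloud import eventarc_v1

    client = eventarc_v1.EventarcClient()
    pager = client.list_channel_connections(
        parent="projects/my-project/locations/us-central1",  # hypothetical
    )
    for page in pager.pages:  # one RPC per page
        for conn in page.channel_connections:
            print(conn.name)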
+ if parent is not None: + request.parent = parent + if channel_connection is not None: + request.channel_connection = channel_connection + if channel_connection_id is not None: + request.channel_connection_id = channel_connection_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_channel_connection] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + gce_channel_connection.ChannelConnection, + metadata_type=eventarc.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_channel_connection(self, + request: Optional[Union[eventarc.DeleteChannelConnectionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Delete a single ChannelConnection. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import eventarc_v1 + + def sample_delete_channel_connection(): + # Create a client + client = eventarc_v1.EventarcClient() + + # Initialize request argument(s) + request = eventarc_v1.DeleteChannelConnectionRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_channel_connection(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.eventarc_v1.types.DeleteChannelConnectionRequest, dict]): + The request object. The request message for the + DeleteChannelConnection method. + name (str): + Required. The name of the channel + connection to delete. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.eventarc_v1.types.ChannelConnection` A representation of the ChannelConnection resource. + A ChannelConnection is a resource which event + providers create during the activation process to + establish a connection between the provider and the + subscriber channel. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
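[Reviewer note, not part of the generated diff] The "quick check" repeated in each method body makes the `request` object and the flattened keyword fields mutually exclusive; a small sketch of the failure mode callers see:

.. code-block:: python

    # Sketch only: mixing a request object with a flattened field raises
    # ValueError before any RPC is attempted.
    from google.cloud import eventarc_v1

    client = eventarc_v1.EventarcClient()
    request = eventarc_v1.DeleteChannelConnectionRequest(name="name_value")
    try:
        client.delete_channel_connection(request=request, name="name_value")
    except ValueError as exc:
        print(exc)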
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a eventarc.DeleteChannelConnectionRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, eventarc.DeleteChannelConnectionRequest): + request = eventarc.DeleteChannelConnectionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_channel_connection] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + channel_connection.ChannelConnection, + metadata_type=eventarc.OperationMetadata, + ) + + # Done; return the response. + return response + + def get_google_channel_config(self, + request: Optional[Union[eventarc.GetGoogleChannelConfigRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> google_channel_config.GoogleChannelConfig: + r"""Get a GoogleChannelConfig + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import eventarc_v1 + + def sample_get_google_channel_config(): + # Create a client + client = eventarc_v1.EventarcClient() + + # Initialize request argument(s) + request = eventarc_v1.GetGoogleChannelConfigRequest( + name="name_value", + ) + + # Make the request + response = client.get_google_channel_config(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.eventarc_v1.types.GetGoogleChannelConfigRequest, dict]): + The request object. The request message for the + GetGoogleChannelConfig method. + name (str): + Required. The name of the config to + get. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.eventarc_v1.types.GoogleChannelConfig: + A GoogleChannelConfig is a resource + that stores the custom settings + respected by Eventarc first-party + triggers in the matching region. 
Once + configured, first-party event data will + be protected using the specified custom + managed encryption key instead of + Google-managed encryption keys. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a eventarc.GetGoogleChannelConfigRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, eventarc.GetGoogleChannelConfigRequest): + request = eventarc.GetGoogleChannelConfigRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_google_channel_config] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_google_channel_config(self, + request: Optional[Union[eventarc.UpdateGoogleChannelConfigRequest, dict]] = None, + *, + google_channel_config: Optional[gce_google_channel_config.GoogleChannelConfig] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gce_google_channel_config.GoogleChannelConfig: + r"""Update a single GoogleChannelConfig + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import eventarc_v1 + + def sample_update_google_channel_config(): + # Create a client + client = eventarc_v1.EventarcClient() + + # Initialize request argument(s) + google_channel_config = eventarc_v1.GoogleChannelConfig() + google_channel_config.name = "name_value" + + request = eventarc_v1.UpdateGoogleChannelConfigRequest( + google_channel_config=google_channel_config, + ) + + # Make the request + response = client.update_google_channel_config(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.eventarc_v1.types.UpdateGoogleChannelConfigRequest, dict]): + The request object. The request message for the + UpdateGoogleChannelConfig method. + google_channel_config (google.cloud.eventarc_v1.types.GoogleChannelConfig): + Required. The config to be updated. + This corresponds to the ``google_channel_config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ update_mask (google.protobuf.field_mask_pb2.FieldMask): + The fields to be updated; only fields explicitly + provided are updated. If no field mask is provided, all + provided fields in the request are updated. To update + all fields, provide a field mask of "*". + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.eventarc_v1.types.GoogleChannelConfig: + A GoogleChannelConfig is a resource + that stores the custom settings + respected by Eventarc first-party + triggers in the matching region. Once + configured, first-party event data will + be protected using the specified custom + managed encryption key instead of + Google-managed encryption keys. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([google_channel_config, update_mask]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a eventarc.UpdateGoogleChannelConfigRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, eventarc.UpdateGoogleChannelConfigRequest): + request = eventarc.UpdateGoogleChannelConfigRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if google_channel_config is not None: + request.google_channel_config = google_channel_config + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_google_channel_config] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("google_channel_config.name", request.google_channel_config.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "EventarcClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. 
+ retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. 
+ retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.delete_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + def set_iam_policy( + self, + request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Sets the IAM access control policy on the specified function. + + Replaces any existing policy. + + Args: + request (:class:`~.iam_policy_pb2.SetIamPolicyRequest`): + The request object. Request message for `SetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ Returns: + ~.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. + + **JSON Example** + + :: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + + **YAML Example** + + :: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the `IAM + developer's + guide <https://cloud.google.com/iam/docs>`__. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.SetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.set_iam_policy, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("resource", request.resource),)), + ) + + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def get_iam_policy( + self, + request: Optional[iam_policy_pb2.GetIamPolicyRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Gets the IAM access control policy for a function. + + Returns an empty policy if the function exists and does not have a + policy set. + + Args: + request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`): + The request object. Request message for `GetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if + any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy.
+                It is used to specify access control policies for Cloud
+                Platform resources.
+                A ``Policy`` is a collection of ``bindings``. A
+                ``binding`` binds one or more ``members`` to a single
+                ``role``. Members can be user accounts, service
+                accounts, Google groups, and domains (such as G Suite).
+                A ``role`` is a named list of permissions (defined by
+                IAM or configured by users). A ``binding`` can
+                optionally specify a ``condition``, which is a logic
+                expression that further constrains the role binding
+                based on attributes about the request and/or target
+                resource.
+
+                **JSON Example**
+
+                ::
+
+                    {
+                      "bindings": [
+                        {
+                          "role": "roles/resourcemanager.organizationAdmin",
+                          "members": [
+                            "user:mike@example.com",
+                            "group:admins@example.com",
+                            "domain:google.com",
+                            "serviceAccount:my-project-id@appspot.gserviceaccount.com"
+                          ]
+                        },
+                        {
+                          "role": "roles/resourcemanager.organizationViewer",
+                          "members": ["user:eve@example.com"],
+                          "condition": {
+                            "title": "expirable access",
+                            "description": "Does not grant access after Sep 2020",
+                            "expression": "request.time <
+                            timestamp('2020-10-01T00:00:00.000Z')",
+                          }
+                        }
+                      ]
+                    }
+
+                **YAML Example**
+
+                ::
+
+                    bindings:
+                    - members:
+                      - user:mike@example.com
+                      - group:admins@example.com
+                      - domain:google.com
+                      - serviceAccount:my-project-id@appspot.gserviceaccount.com
+                      role: roles/resourcemanager.organizationAdmin
+                    - members:
+                      - user:eve@example.com
+                      role: roles/resourcemanager.organizationViewer
+                      condition:
+                        title: expirable access
+                        description: Does not grant access after Sep 2020
+                        expression: request.time < timestamp('2020-10-01T00:00:00.000Z')
+
+                For a description of IAM and its features, see the `IAM
+                developer's
+                guide <https://cloud.google.com/iam/docs>`__.
+        """
+        # Create or coerce a protobuf request object.
+
+        # The request isn't a proto-plus wrapped type,
+        # so it must be constructed via keyword expansion.
+        if isinstance(request, dict):
+            request = iam_policy_pb2.GetIamPolicyRequest(**request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = gapic_v1.method.wrap_method(
+            self._transport.get_iam_policy,
+            default_timeout=None,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata(
+                (("resource", request.resource),)),
+        )
+
+        # Send the request.
+        response = rpc(
+            request, retry=retry, timeout=timeout, metadata=metadata,)
+
+        # Done; return the response.
+        return response
+
+    def test_iam_permissions(
+        self,
+        request: Optional[iam_policy_pb2.TestIamPermissionsRequest] = None,
+        *,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> iam_policy_pb2.TestIamPermissionsResponse:
+        r"""Tests the specified IAM permissions against the IAM access control
+        policy for a function.
+
+        If the function does not exist, this will return an empty set
+        of permissions, not a NOT_FOUND error.
+
+        Args:
+            request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`):
+                The request object. Request message for
+                `TestIamPermissions` method.
+            retry (google.api_core.retry.Retry): Designation of what errors,
+                if any, should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
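+        Example:
+            A minimal sketch; ``client``, the resource name, and the
+            permission string below are hypothetical::
+
+                from google.iam.v1 import iam_policy_pb2
+
+                response = client.test_iam_permissions(
+                    iam_policy_pb2.TestIamPermissionsRequest(
+                        resource="projects/my-project/locations/us-central1/channels/my-channel",
+                        permissions=["eventarc.channels.get"],
+                    ))
+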
+ Returns: + ~.iam_policy_pb2.TestIamPermissionsResponse: + Response message for ``TestIamPermissions`` method. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.TestIamPermissionsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.test_iam_permissions, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("resource", request.resource),)), + ) + + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_location, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. 
+ # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_locations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) diff --git a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/pagers.py b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/pagers.py index 1956f7bb1a..773f83d8d8 100755 --- a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/pagers.py +++ b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/pagers.py @@ -15,6 +15,9 @@ # from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator +from google.cloud.eventarc_v1.types import channel +from google.cloud.eventarc_v1.types import channel_connection +from google.cloud.eventarc_v1.types import discovery from google.cloud.eventarc_v1.types import eventarc from google.cloud.eventarc_v1.types import trigger @@ -138,3 +141,366 @@ async def async_generator(): def __repr__(self) -> str: return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListChannelsPager: + """A pager for iterating through ``list_channels`` requests. + + This class thinly wraps an initial + :class:`google.cloud.eventarc_v1.types.ListChannelsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``channels`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListChannels`` requests and continue to iterate + through the ``channels`` field on the + corresponding responses. + + All the usual :class:`google.cloud.eventarc_v1.types.ListChannelsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., eventarc.ListChannelsResponse], + request: eventarc.ListChannelsRequest, + response: eventarc.ListChannelsResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.eventarc_v1.types.ListChannelsRequest): + The initial request object. + response (google.cloud.eventarc_v1.types.ListChannelsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
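+
+        Example:
+            A minimal iteration sketch; ``client`` is assumed to be an
+            already-constructed ``EventarcClient``, and the parent value is
+            hypothetical::
+
+                pager = client.list_channels(request={"parent": "projects/my-project/locations/us-central1"})
+                for channel_ in pager:
+                    print(channel_.name)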
+ """ + self._method = method + self._request = eventarc.ListChannelsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[eventarc.ListChannelsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[channel.Channel]: + for page in self.pages: + yield from page.channels + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListChannelsAsyncPager: + """A pager for iterating through ``list_channels`` requests. + + This class thinly wraps an initial + :class:`google.cloud.eventarc_v1.types.ListChannelsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``channels`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListChannels`` requests and continue to iterate + through the ``channels`` field on the + corresponding responses. + + All the usual :class:`google.cloud.eventarc_v1.types.ListChannelsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[eventarc.ListChannelsResponse]], + request: eventarc.ListChannelsRequest, + response: eventarc.ListChannelsResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.eventarc_v1.types.ListChannelsRequest): + The initial request object. + response (google.cloud.eventarc_v1.types.ListChannelsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = eventarc.ListChannelsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[eventarc.ListChannelsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[channel.Channel]: + async def async_generator(): + async for page in self.pages: + for response in page.channels: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListProvidersPager: + """A pager for iterating through ``list_providers`` requests. + + This class thinly wraps an initial + :class:`google.cloud.eventarc_v1.types.ListProvidersResponse` object, and + provides an ``__iter__`` method to iterate through its + ``providers`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListProviders`` requests and continue to iterate + through the ``providers`` field on the + corresponding responses. + + All the usual :class:`google.cloud.eventarc_v1.types.ListProvidersResponse` + attributes are available on the pager. 
If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., eventarc.ListProvidersResponse], + request: eventarc.ListProvidersRequest, + response: eventarc.ListProvidersResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.eventarc_v1.types.ListProvidersRequest): + The initial request object. + response (google.cloud.eventarc_v1.types.ListProvidersResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = eventarc.ListProvidersRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[eventarc.ListProvidersResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[discovery.Provider]: + for page in self.pages: + yield from page.providers + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListProvidersAsyncPager: + """A pager for iterating through ``list_providers`` requests. + + This class thinly wraps an initial + :class:`google.cloud.eventarc_v1.types.ListProvidersResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``providers`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListProviders`` requests and continue to iterate + through the ``providers`` field on the + corresponding responses. + + All the usual :class:`google.cloud.eventarc_v1.types.ListProvidersResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[eventarc.ListProvidersResponse]], + request: eventarc.ListProvidersRequest, + response: eventarc.ListProvidersResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.eventarc_v1.types.ListProvidersRequest): + The initial request object. + response (google.cloud.eventarc_v1.types.ListProvidersResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
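+
+        Example:
+            A minimal async-iteration sketch; ``client`` is assumed to be an
+            already-constructed ``EventarcAsyncClient``, and the parent value
+            is hypothetical::
+
+                pager = await client.list_providers(request={"parent": "projects/my-project/locations/us-central1"})
+                async for provider in pager:
+                    print(provider.name)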
+ """ + self._method = method + self._request = eventarc.ListProvidersRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[eventarc.ListProvidersResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[discovery.Provider]: + async def async_generator(): + async for page in self.pages: + for response in page.providers: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListChannelConnectionsPager: + """A pager for iterating through ``list_channel_connections`` requests. + + This class thinly wraps an initial + :class:`google.cloud.eventarc_v1.types.ListChannelConnectionsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``channel_connections`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListChannelConnections`` requests and continue to iterate + through the ``channel_connections`` field on the + corresponding responses. + + All the usual :class:`google.cloud.eventarc_v1.types.ListChannelConnectionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., eventarc.ListChannelConnectionsResponse], + request: eventarc.ListChannelConnectionsRequest, + response: eventarc.ListChannelConnectionsResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.eventarc_v1.types.ListChannelConnectionsRequest): + The initial request object. + response (google.cloud.eventarc_v1.types.ListChannelConnectionsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = eventarc.ListChannelConnectionsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[eventarc.ListChannelConnectionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[channel_connection.ChannelConnection]: + for page in self.pages: + yield from page.channel_connections + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListChannelConnectionsAsyncPager: + """A pager for iterating through ``list_channel_connections`` requests. + + This class thinly wraps an initial + :class:`google.cloud.eventarc_v1.types.ListChannelConnectionsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``channel_connections`` field. 
+ + If there are more pages, the ``__aiter__`` method will make additional + ``ListChannelConnections`` requests and continue to iterate + through the ``channel_connections`` field on the + corresponding responses. + + All the usual :class:`google.cloud.eventarc_v1.types.ListChannelConnectionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[eventarc.ListChannelConnectionsResponse]], + request: eventarc.ListChannelConnectionsRequest, + response: eventarc.ListChannelConnectionsResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.eventarc_v1.types.ListChannelConnectionsRequest): + The initial request object. + response (google.cloud.eventarc_v1.types.ListChannelConnectionsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = eventarc.ListChannelConnectionsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[eventarc.ListChannelConnectionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[channel_connection.ChannelConnection]: + async def async_generator(): + async for page in self.pages: + for response in page.channel_connections: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/base.py b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/base.py index 7a2e60bc0b..7096c28dcc 100755 --- a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/base.py +++ b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/base.py @@ -27,12 +27,17 @@ from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore +from google.cloud.eventarc_v1.types import channel +from google.cloud.eventarc_v1.types import channel_connection +from google.cloud.eventarc_v1.types import discovery from google.cloud.eventarc_v1.types import eventarc +from google.cloud.eventarc_v1.types import google_channel_config +from google.cloud.eventarc_v1.types import google_channel_config as gce_google_channel_config from google.cloud.eventarc_v1.types import trigger from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) @@ -144,6 +149,71 @@ def _prep_wrapped_messages(self, client_info): 
default_timeout=None, client_info=client_info, ), + self.get_channel: gapic_v1.method.wrap_method( + self.get_channel, + default_timeout=None, + client_info=client_info, + ), + self.list_channels: gapic_v1.method.wrap_method( + self.list_channels, + default_timeout=None, + client_info=client_info, + ), + self.create_channel_: gapic_v1.method.wrap_method( + self.create_channel_, + default_timeout=None, + client_info=client_info, + ), + self.update_channel: gapic_v1.method.wrap_method( + self.update_channel, + default_timeout=None, + client_info=client_info, + ), + self.delete_channel: gapic_v1.method.wrap_method( + self.delete_channel, + default_timeout=None, + client_info=client_info, + ), + self.get_provider: gapic_v1.method.wrap_method( + self.get_provider, + default_timeout=None, + client_info=client_info, + ), + self.list_providers: gapic_v1.method.wrap_method( + self.list_providers, + default_timeout=None, + client_info=client_info, + ), + self.get_channel_connection: gapic_v1.method.wrap_method( + self.get_channel_connection, + default_timeout=None, + client_info=client_info, + ), + self.list_channel_connections: gapic_v1.method.wrap_method( + self.list_channel_connections, + default_timeout=None, + client_info=client_info, + ), + self.create_channel_connection: gapic_v1.method.wrap_method( + self.create_channel_connection, + default_timeout=None, + client_info=client_info, + ), + self.delete_channel_connection: gapic_v1.method.wrap_method( + self.delete_channel_connection, + default_timeout=None, + client_info=client_info, + ), + self.get_google_channel_config: gapic_v1.method.wrap_method( + self.get_google_channel_config, + default_timeout=None, + client_info=client_info, + ), + self.update_google_channel_config: gapic_v1.method.wrap_method( + self.update_google_channel_config, + default_timeout=None, + client_info=client_info, + ), } def close(self): @@ -205,6 +275,205 @@ def delete_trigger(self) -> Callable[ ]]: raise NotImplementedError() + @property + def get_channel(self) -> Callable[ + [eventarc.GetChannelRequest], + Union[ + channel.Channel, + Awaitable[channel.Channel] + ]]: + raise NotImplementedError() + + @property + def list_channels(self) -> Callable[ + [eventarc.ListChannelsRequest], + Union[ + eventarc.ListChannelsResponse, + Awaitable[eventarc.ListChannelsResponse] + ]]: + raise NotImplementedError() + + @property + def create_channel_(self) -> Callable[ + [eventarc.CreateChannelRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def update_channel(self) -> Callable[ + [eventarc.UpdateChannelRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def delete_channel(self) -> Callable[ + [eventarc.DeleteChannelRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def get_provider(self) -> Callable[ + [eventarc.GetProviderRequest], + Union[ + discovery.Provider, + Awaitable[discovery.Provider] + ]]: + raise NotImplementedError() + + @property + def list_providers(self) -> Callable[ + [eventarc.ListProvidersRequest], + Union[ + eventarc.ListProvidersResponse, + Awaitable[eventarc.ListProvidersResponse] + ]]: + raise NotImplementedError() + + @property + def get_channel_connection(self) -> Callable[ + [eventarc.GetChannelConnectionRequest], + Union[ + channel_connection.ChannelConnection, + 
Awaitable[channel_connection.ChannelConnection] + ]]: + raise NotImplementedError() + + @property + def list_channel_connections(self) -> Callable[ + [eventarc.ListChannelConnectionsRequest], + Union[ + eventarc.ListChannelConnectionsResponse, + Awaitable[eventarc.ListChannelConnectionsResponse] + ]]: + raise NotImplementedError() + + @property + def create_channel_connection(self) -> Callable[ + [eventarc.CreateChannelConnectionRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def delete_channel_connection(self) -> Callable[ + [eventarc.DeleteChannelConnectionRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def get_google_channel_config(self) -> Callable[ + [eventarc.GetGoogleChannelConfigRequest], + Union[ + google_channel_config.GoogleChannelConfig, + Awaitable[google_channel_config.GoogleChannelConfig] + ]]: + raise NotImplementedError() + + @property + def update_google_channel_config(self) -> Callable[ + [eventarc.UpdateGoogleChannelConfigRequest], + Union[ + gce_google_channel_config.GoogleChannelConfig, + Awaitable[gce_google_channel_config.GoogleChannelConfig] + ]]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[operations_pb2.ListOperationsResponse, Awaitable[operations_pb2.ListOperationsResponse]], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> Callable[ + [operations_pb2.CancelOperationRequest], + None, + ]: + raise NotImplementedError() + + @property + def delete_operation( + self, + ) -> Callable[ + [operations_pb2.DeleteOperationRequest], + None, + ]: + raise NotImplementedError() + + @property + def set_iam_policy( + self, + ) -> Callable[ + [iam_policy_pb2.SetIamPolicyRequest], + Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], + ]: + raise NotImplementedError() + + @property + def get_iam_policy( + self, + ) -> Callable[ + [iam_policy_pb2.GetIamPolicyRequest], + Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], + ]: + raise NotImplementedError() + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + Union[ + iam_policy_pb2.TestIamPermissionsResponse, + Awaitable[iam_policy_pb2.TestIamPermissionsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_location(self, + ) -> Callable[ + [locations_pb2.GetLocationRequest], + Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], + ]: + raise NotImplementedError() + + @property + def list_locations(self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], + Union[locations_pb2.ListLocationsResponse, Awaitable[locations_pb2.ListLocationsResponse]], + ]: + raise NotImplementedError() + @property def kind(self) -> str: raise NotImplementedError() diff --git a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc.py b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc.py index 26f3ff79e7..1b00e78106 100755 --- a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc.py +++ 
b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc.py @@ -25,12 +25,17 @@ import grpc # type: ignore +from google.cloud.eventarc_v1.types import channel +from google.cloud.eventarc_v1.types import channel_connection +from google.cloud.eventarc_v1.types import discovery from google.cloud.eventarc_v1.types import eventarc +from google.cloud.eventarc_v1.types import google_channel_config +from google.cloud.eventarc_v1.types import google_channel_config as gce_google_channel_config from google.cloud.eventarc_v1.types import trigger from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore from .base import EventarcTransport, DEFAULT_CLIENT_INFO @@ -381,9 +386,536 @@ def delete_trigger(self) -> Callable[ ) return self._stubs['delete_trigger'] + @property + def get_channel(self) -> Callable[ + [eventarc.GetChannelRequest], + channel.Channel]: + r"""Return a callable for the get channel method over gRPC. + + Get a single Channel. + + Returns: + Callable[[~.GetChannelRequest], + ~.Channel]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_channel' not in self._stubs: + self._stubs['get_channel'] = self.grpc_channel.unary_unary( + '/google.cloud.eventarc.v1.Eventarc/GetChannel', + request_serializer=eventarc.GetChannelRequest.serialize, + response_deserializer=channel.Channel.deserialize, + ) + return self._stubs['get_channel'] + + @property + def list_channels(self) -> Callable[ + [eventarc.ListChannelsRequest], + eventarc.ListChannelsResponse]: + r"""Return a callable for the list channels method over gRPC. + + List channels. + + Returns: + Callable[[~.ListChannelsRequest], + ~.ListChannelsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_channels' not in self._stubs: + self._stubs['list_channels'] = self.grpc_channel.unary_unary( + '/google.cloud.eventarc.v1.Eventarc/ListChannels', + request_serializer=eventarc.ListChannelsRequest.serialize, + response_deserializer=eventarc.ListChannelsResponse.deserialize, + ) + return self._stubs['list_channels'] + + @property + def create_channel_(self) -> Callable[ + [eventarc.CreateChannelRequest], + operations_pb2.Operation]: + r"""Return a callable for the create channel method over gRPC. + + Create a new channel in a particular project and + location. + + Returns: + Callable[[~.CreateChannelRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
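+        # A minimal usage sketch (illustrative only; ``transport`` and the
+        # parent value are hypothetical):
+        #
+        #     operation = transport.create_channel_(
+        #         eventarc.CreateChannelRequest(parent="projects/my-project/locations/us-central1"))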
+ if 'create_channel_' not in self._stubs: + self._stubs['create_channel_'] = self.grpc_channel.unary_unary( + '/google.cloud.eventarc.v1.Eventarc/CreateChannel', + request_serializer=eventarc.CreateChannelRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_channel_'] + + @property + def update_channel(self) -> Callable[ + [eventarc.UpdateChannelRequest], + operations_pb2.Operation]: + r"""Return a callable for the update channel method over gRPC. + + Update a single channel. + + Returns: + Callable[[~.UpdateChannelRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_channel' not in self._stubs: + self._stubs['update_channel'] = self.grpc_channel.unary_unary( + '/google.cloud.eventarc.v1.Eventarc/UpdateChannel', + request_serializer=eventarc.UpdateChannelRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['update_channel'] + + @property + def delete_channel(self) -> Callable[ + [eventarc.DeleteChannelRequest], + operations_pb2.Operation]: + r"""Return a callable for the delete channel method over gRPC. + + Delete a single channel. + + Returns: + Callable[[~.DeleteChannelRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_channel' not in self._stubs: + self._stubs['delete_channel'] = self.grpc_channel.unary_unary( + '/google.cloud.eventarc.v1.Eventarc/DeleteChannel', + request_serializer=eventarc.DeleteChannelRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['delete_channel'] + + @property + def get_provider(self) -> Callable[ + [eventarc.GetProviderRequest], + discovery.Provider]: + r"""Return a callable for the get provider method over gRPC. + + Get a single Provider. + + Returns: + Callable[[~.GetProviderRequest], + ~.Provider]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_provider' not in self._stubs: + self._stubs['get_provider'] = self.grpc_channel.unary_unary( + '/google.cloud.eventarc.v1.Eventarc/GetProvider', + request_serializer=eventarc.GetProviderRequest.serialize, + response_deserializer=discovery.Provider.deserialize, + ) + return self._stubs['get_provider'] + + @property + def list_providers(self) -> Callable[ + [eventarc.ListProvidersRequest], + eventarc.ListProvidersResponse]: + r"""Return a callable for the list providers method over gRPC. + + List providers. + + Returns: + Callable[[~.ListProvidersRequest], + ~.ListProvidersResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'list_providers' not in self._stubs: + self._stubs['list_providers'] = self.grpc_channel.unary_unary( + '/google.cloud.eventarc.v1.Eventarc/ListProviders', + request_serializer=eventarc.ListProvidersRequest.serialize, + response_deserializer=eventarc.ListProvidersResponse.deserialize, + ) + return self._stubs['list_providers'] + + @property + def get_channel_connection(self) -> Callable[ + [eventarc.GetChannelConnectionRequest], + channel_connection.ChannelConnection]: + r"""Return a callable for the get channel connection method over gRPC. + + Get a single ChannelConnection. + + Returns: + Callable[[~.GetChannelConnectionRequest], + ~.ChannelConnection]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_channel_connection' not in self._stubs: + self._stubs['get_channel_connection'] = self.grpc_channel.unary_unary( + '/google.cloud.eventarc.v1.Eventarc/GetChannelConnection', + request_serializer=eventarc.GetChannelConnectionRequest.serialize, + response_deserializer=channel_connection.ChannelConnection.deserialize, + ) + return self._stubs['get_channel_connection'] + + @property + def list_channel_connections(self) -> Callable[ + [eventarc.ListChannelConnectionsRequest], + eventarc.ListChannelConnectionsResponse]: + r"""Return a callable for the list channel connections method over gRPC. + + List channel connections. + + Returns: + Callable[[~.ListChannelConnectionsRequest], + ~.ListChannelConnectionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_channel_connections' not in self._stubs: + self._stubs['list_channel_connections'] = self.grpc_channel.unary_unary( + '/google.cloud.eventarc.v1.Eventarc/ListChannelConnections', + request_serializer=eventarc.ListChannelConnectionsRequest.serialize, + response_deserializer=eventarc.ListChannelConnectionsResponse.deserialize, + ) + return self._stubs['list_channel_connections'] + + @property + def create_channel_connection(self) -> Callable[ + [eventarc.CreateChannelConnectionRequest], + operations_pb2.Operation]: + r"""Return a callable for the create channel connection method over gRPC. + + Create a new ChannelConnection in a particular + project and location. + + Returns: + Callable[[~.CreateChannelConnectionRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'create_channel_connection' not in self._stubs: + self._stubs['create_channel_connection'] = self.grpc_channel.unary_unary( + '/google.cloud.eventarc.v1.Eventarc/CreateChannelConnection', + request_serializer=eventarc.CreateChannelConnectionRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_channel_connection'] + + @property + def delete_channel_connection(self) -> Callable[ + [eventarc.DeleteChannelConnectionRequest], + operations_pb2.Operation]: + r"""Return a callable for the delete channel connection method over gRPC. + + Delete a single ChannelConnection. + + Returns: + Callable[[~.DeleteChannelConnectionRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_channel_connection' not in self._stubs: + self._stubs['delete_channel_connection'] = self.grpc_channel.unary_unary( + '/google.cloud.eventarc.v1.Eventarc/DeleteChannelConnection', + request_serializer=eventarc.DeleteChannelConnectionRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['delete_channel_connection'] + + @property + def get_google_channel_config(self) -> Callable[ + [eventarc.GetGoogleChannelConfigRequest], + google_channel_config.GoogleChannelConfig]: + r"""Return a callable for the get google channel config method over gRPC. + + Get a GoogleChannelConfig + + Returns: + Callable[[~.GetGoogleChannelConfigRequest], + ~.GoogleChannelConfig]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_google_channel_config' not in self._stubs: + self._stubs['get_google_channel_config'] = self.grpc_channel.unary_unary( + '/google.cloud.eventarc.v1.Eventarc/GetGoogleChannelConfig', + request_serializer=eventarc.GetGoogleChannelConfigRequest.serialize, + response_deserializer=google_channel_config.GoogleChannelConfig.deserialize, + ) + return self._stubs['get_google_channel_config'] + + @property + def update_google_channel_config(self) -> Callable[ + [eventarc.UpdateGoogleChannelConfigRequest], + gce_google_channel_config.GoogleChannelConfig]: + r"""Return a callable for the update google channel config method over gRPC. + + Update a single GoogleChannelConfig + + Returns: + Callable[[~.UpdateGoogleChannelConfigRequest], + ~.GoogleChannelConfig]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
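+        # A minimal usage sketch (illustrative only; ``transport`` and the
+        # config name are hypothetical):
+        #
+        #     updated = transport.update_google_channel_config(
+        #         eventarc.UpdateGoogleChannelConfigRequest(
+        #             google_channel_config=google_channel_config.GoogleChannelConfig(
+        #                 name="projects/my-project/locations/us-central1/googleChannelConfig")))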
+ if 'update_google_channel_config' not in self._stubs: + self._stubs['update_google_channel_config'] = self.grpc_channel.unary_unary( + '/google.cloud.eventarc.v1.Eventarc/UpdateGoogleChannelConfig', + request_serializer=eventarc.UpdateGoogleChannelConfigRequest.serialize, + response_deserializer=gce_google_channel_config.GoogleChannelConfig.deserialize, + ) + return self._stubs['update_google_channel_config'] + def close(self): self.grpc_channel.close() + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: + r"""Return a callable for the list_operations method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def list_locations( + self, + ) -> Callable[[locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse]: + r"""Return a callable for the list locations method over gRPC. 
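+
+        A minimal call sketch (illustrative only; ``transport`` and the
+        resource name are hypothetical)::
+
+            response = transport.list_locations(
+                locations_pb2.ListLocationsRequest(name="projects/my-project"))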
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the list locations method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_location" not in self._stubs: + self._stubs["get_location"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + + @property + def set_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]: + r"""Return a callable for the set iam policy method over gRPC. + Sets the IAM access control policy on the specified + function. Replaces any existing policy. + Returns: + Callable[[~.SetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "set_iam_policy" not in self._stubs: + self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/SetIamPolicy", + request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["set_iam_policy"] + + @property + def get_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]: + r"""Return a callable for the get iam policy method over gRPC. + Gets the IAM access control policy for a function. + Returns an empty policy if the function exists and does + not have a policy set. + Returns: + Callable[[~.GetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_iam_policy" not in self._stubs: + self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/GetIamPolicy", + request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["get_iam_policy"] + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], iam_policy_pb2.TestIamPermissionsResponse + ]: + r"""Return a callable for the test iam permissions method over gRPC. + Tests the specified permissions against the IAM access control + policy for a function. 
If the function does not exist, this will + return an empty set of permissions, not a NOT_FOUND error. + Returns: + Callable[[~.TestIamPermissionsRequest], + ~.TestIamPermissionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "test_iam_permissions" not in self._stubs: + self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/TestIamPermissions", + request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, + ) + return self._stubs["test_iam_permissions"] + @property def kind(self) -> str: return "grpc" diff --git a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc_asyncio.py b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc_asyncio.py index 2d8824845b..b6e2874c4c 100755 --- a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc_asyncio.py +++ b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc_asyncio.py @@ -25,12 +25,17 @@ import grpc # type: ignore from grpc.experimental import aio # type: ignore +from google.cloud.eventarc_v1.types import channel +from google.cloud.eventarc_v1.types import channel_connection +from google.cloud.eventarc_v1.types import discovery from google.cloud.eventarc_v1.types import eventarc +from google.cloud.eventarc_v1.types import google_channel_config +from google.cloud.eventarc_v1.types import google_channel_config as gce_google_channel_config from google.cloud.eventarc_v1.types import trigger from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore from .base import EventarcTransport, DEFAULT_CLIENT_INFO from .grpc import EventarcGrpcTransport @@ -384,9 +389,536 @@ def delete_trigger(self) -> Callable[ ) return self._stubs['delete_trigger'] + @property + def get_channel(self) -> Callable[ + [eventarc.GetChannelRequest], + Awaitable[channel.Channel]]: + r"""Return a callable for the get channel method over gRPC. + + Get a single Channel. + + Returns: + Callable[[~.GetChannelRequest], + Awaitable[~.Channel]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_channel' not in self._stubs: + self._stubs['get_channel'] = self.grpc_channel.unary_unary( + '/google.cloud.eventarc.v1.Eventarc/GetChannel', + request_serializer=eventarc.GetChannelRequest.serialize, + response_deserializer=channel.Channel.deserialize, + ) + return self._stubs['get_channel'] + + @property + def list_channels(self) -> Callable[ + [eventarc.ListChannelsRequest], + Awaitable[eventarc.ListChannelsResponse]]: + r"""Return a callable for the list channels method over gRPC. + + List channels. 
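+
+        A minimal async call sketch (illustrative only; ``transport`` is
+        assumed to be an initialized ``EventarcGrpcAsyncIOTransport`` and the
+        parent value is hypothetical)::
+
+            response = await transport.list_channels(
+                eventarc.ListChannelsRequest(parent="projects/my-project/locations/us-central1"))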
+ + Returns: + Callable[[~.ListChannelsRequest], + Awaitable[~.ListChannelsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_channels' not in self._stubs: + self._stubs['list_channels'] = self.grpc_channel.unary_unary( + '/google.cloud.eventarc.v1.Eventarc/ListChannels', + request_serializer=eventarc.ListChannelsRequest.serialize, + response_deserializer=eventarc.ListChannelsResponse.deserialize, + ) + return self._stubs['list_channels'] + + @property + def create_channel_(self) -> Callable[ + [eventarc.CreateChannelRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the create channel method over gRPC. + + Create a new channel in a particular project and + location. + + Returns: + Callable[[~.CreateChannelRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_channel_' not in self._stubs: + self._stubs['create_channel_'] = self.grpc_channel.unary_unary( + '/google.cloud.eventarc.v1.Eventarc/CreateChannel', + request_serializer=eventarc.CreateChannelRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_channel_'] + + @property + def update_channel(self) -> Callable[ + [eventarc.UpdateChannelRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the update channel method over gRPC. + + Update a single channel. + + Returns: + Callable[[~.UpdateChannelRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_channel' not in self._stubs: + self._stubs['update_channel'] = self.grpc_channel.unary_unary( + '/google.cloud.eventarc.v1.Eventarc/UpdateChannel', + request_serializer=eventarc.UpdateChannelRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['update_channel'] + + @property + def delete_channel(self) -> Callable[ + [eventarc.DeleteChannelRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the delete channel method over gRPC. + + Delete a single channel. + + Returns: + Callable[[~.DeleteChannelRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'delete_channel' not in self._stubs: + self._stubs['delete_channel'] = self.grpc_channel.unary_unary( + '/google.cloud.eventarc.v1.Eventarc/DeleteChannel', + request_serializer=eventarc.DeleteChannelRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['delete_channel'] + + @property + def get_provider(self) -> Callable[ + [eventarc.GetProviderRequest], + Awaitable[discovery.Provider]]: + r"""Return a callable for the get provider method over gRPC. + + Get a single Provider. + + Returns: + Callable[[~.GetProviderRequest], + Awaitable[~.Provider]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_provider' not in self._stubs: + self._stubs['get_provider'] = self.grpc_channel.unary_unary( + '/google.cloud.eventarc.v1.Eventarc/GetProvider', + request_serializer=eventarc.GetProviderRequest.serialize, + response_deserializer=discovery.Provider.deserialize, + ) + return self._stubs['get_provider'] + + @property + def list_providers(self) -> Callable[ + [eventarc.ListProvidersRequest], + Awaitable[eventarc.ListProvidersResponse]]: + r"""Return a callable for the list providers method over gRPC. + + List providers. + + Returns: + Callable[[~.ListProvidersRequest], + Awaitable[~.ListProvidersResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_providers' not in self._stubs: + self._stubs['list_providers'] = self.grpc_channel.unary_unary( + '/google.cloud.eventarc.v1.Eventarc/ListProviders', + request_serializer=eventarc.ListProvidersRequest.serialize, + response_deserializer=eventarc.ListProvidersResponse.deserialize, + ) + return self._stubs['list_providers'] + + @property + def get_channel_connection(self) -> Callable[ + [eventarc.GetChannelConnectionRequest], + Awaitable[channel_connection.ChannelConnection]]: + r"""Return a callable for the get channel connection method over gRPC. + + Get a single ChannelConnection. + + Returns: + Callable[[~.GetChannelConnectionRequest], + Awaitable[~.ChannelConnection]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_channel_connection' not in self._stubs: + self._stubs['get_channel_connection'] = self.grpc_channel.unary_unary( + '/google.cloud.eventarc.v1.Eventarc/GetChannelConnection', + request_serializer=eventarc.GetChannelConnectionRequest.serialize, + response_deserializer=channel_connection.ChannelConnection.deserialize, + ) + return self._stubs['get_channel_connection'] + + @property + def list_channel_connections(self) -> Callable[ + [eventarc.ListChannelConnectionsRequest], + Awaitable[eventarc.ListChannelConnectionsResponse]]: + r"""Return a callable for the list channel connections method over gRPC. + + List channel connections. 
+ + Returns: + Callable[[~.ListChannelConnectionsRequest], + Awaitable[~.ListChannelConnectionsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_channel_connections' not in self._stubs: + self._stubs['list_channel_connections'] = self.grpc_channel.unary_unary( + '/google.cloud.eventarc.v1.Eventarc/ListChannelConnections', + request_serializer=eventarc.ListChannelConnectionsRequest.serialize, + response_deserializer=eventarc.ListChannelConnectionsResponse.deserialize, + ) + return self._stubs['list_channel_connections'] + + @property + def create_channel_connection(self) -> Callable[ + [eventarc.CreateChannelConnectionRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the create channel connection method over gRPC. + + Create a new ChannelConnection in a particular + project and location. + + Returns: + Callable[[~.CreateChannelConnectionRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_channel_connection' not in self._stubs: + self._stubs['create_channel_connection'] = self.grpc_channel.unary_unary( + '/google.cloud.eventarc.v1.Eventarc/CreateChannelConnection', + request_serializer=eventarc.CreateChannelConnectionRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_channel_connection'] + + @property + def delete_channel_connection(self) -> Callable[ + [eventarc.DeleteChannelConnectionRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the delete channel connection method over gRPC. + + Delete a single ChannelConnection. + + Returns: + Callable[[~.DeleteChannelConnectionRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_channel_connection' not in self._stubs: + self._stubs['delete_channel_connection'] = self.grpc_channel.unary_unary( + '/google.cloud.eventarc.v1.Eventarc/DeleteChannelConnection', + request_serializer=eventarc.DeleteChannelConnectionRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['delete_channel_connection'] + + @property + def get_google_channel_config(self) -> Callable[ + [eventarc.GetGoogleChannelConfigRequest], + Awaitable[google_channel_config.GoogleChannelConfig]]: + r"""Return a callable for the get google channel config method over gRPC. + + Get a GoogleChannelConfig + + Returns: + Callable[[~.GetGoogleChannelConfigRequest], + Awaitable[~.GoogleChannelConfig]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
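+        # Usage sketch (illustrative only): on this asyncio transport the
+        # property returns a callable whose result can be awaited. The helper
+        # below is an assumption for demonstration, not generated code.
+        async def _example_fetch_config(transport, name):
+            # Build the request, invoke the cached stub, and await the RPC.
+            request = eventarc.GetGoogleChannelConfigRequest(name=name)
+            return await transport.get_google_channel_config(request)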
+ if 'get_google_channel_config' not in self._stubs: + self._stubs['get_google_channel_config'] = self.grpc_channel.unary_unary( + '/google.cloud.eventarc.v1.Eventarc/GetGoogleChannelConfig', + request_serializer=eventarc.GetGoogleChannelConfigRequest.serialize, + response_deserializer=google_channel_config.GoogleChannelConfig.deserialize, + ) + return self._stubs['get_google_channel_config'] + + @property + def update_google_channel_config(self) -> Callable[ + [eventarc.UpdateGoogleChannelConfigRequest], + Awaitable[gce_google_channel_config.GoogleChannelConfig]]: + r"""Return a callable for the update google channel config method over gRPC. + + Update a single GoogleChannelConfig + + Returns: + Callable[[~.UpdateGoogleChannelConfigRequest], + Awaitable[~.GoogleChannelConfig]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_google_channel_config' not in self._stubs: + self._stubs['update_google_channel_config'] = self.grpc_channel.unary_unary( + '/google.cloud.eventarc.v1.Eventarc/UpdateGoogleChannelConfig', + request_serializer=eventarc.UpdateGoogleChannelConfigRequest.serialize, + response_deserializer=gce_google_channel_config.GoogleChannelConfig.deserialize, + ) + return self._stubs['update_google_channel_config'] + def close(self): return self.grpc_channel.close() + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
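+        # Polling sketch (illustrative only): an Operation returned by a
+        # long-running call such as CreateChannel can be re-fetched by name
+        # until `done` is set. `_example_is_done` is an assumption.
+        async def _example_is_done(transport, operation_name):
+            request = operations_pb2.GetOperationRequest(name=operation_name)
+            operation = await transport.get_operation(request)
+            # `done` is True once the server finished, with a result or error.
+            return operation.done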
+ if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: + r"""Return a callable for the list_operations method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def list_locations( + self, + ) -> Callable[[locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse]: + r"""Return a callable for the list locations method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the list locations method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_location" not in self._stubs: + self._stubs["get_location"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + + @property + def set_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]: + r"""Return a callable for the set iam policy method over gRPC. + Sets the IAM access control policy on the specified + function. Replaces any existing policy. + Returns: + Callable[[~.SetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "set_iam_policy" not in self._stubs: + self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/SetIamPolicy", + request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["set_iam_policy"] + + @property + def get_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]: + r"""Return a callable for the get iam policy method over gRPC. + Gets the IAM access control policy for a function. + Returns an empty policy if the function exists and does + not have a policy set. + Returns: + Callable[[~.GetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_iam_policy" not in self._stubs: + self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/GetIamPolicy", + request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["get_iam_policy"] + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], iam_policy_pb2.TestIamPermissionsResponse + ]: + r"""Return a callable for the test iam permissions method over gRPC. + Tests the specified permissions against the IAM access control + policy for a function. If the function does not exist, this will + return an empty set of permissions, not a NOT_FOUND error. + Returns: + Callable[[~.TestIamPermissionsRequest], + ~.TestIamPermissionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "test_iam_permissions" not in self._stubs: + self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/TestIamPermissions", + request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, + ) + return self._stubs["test_iam_permissions"] + __all__ = ( 'EventarcGrpcAsyncIOTransport', diff --git a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py index 028e474937..32a4100f5f 100755 --- a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py +++ b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py @@ -43,7 +43,12 @@ OptionalRetry = Union[retries.Retry, object] # type: ignore +from google.cloud.eventarc_v1.types import channel +from google.cloud.eventarc_v1.types import channel_connection +from google.cloud.eventarc_v1.types import discovery from google.cloud.eventarc_v1.types import eventarc +from google.cloud.eventarc_v1.types import google_channel_config +from google.cloud.eventarc_v1.types import google_channel_config as gce_google_channel_config from google.cloud.eventarc_v1.types import trigger from google.longrunning import operations_pb2 # type: ignore @@ -72,6 +77,22 @@ class EventarcRestInterceptor: .. code-block:: python class MyCustomEventarcInterceptor(EventarcRestInterceptor): + def pre_create_channel(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_channel(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_channel_connection(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_channel_connection(self, response): + logging.log(f"Received response: {response}") + return response + def pre_create_trigger(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -80,6 +101,22 @@ def post_create_trigger(self, response): logging.log(f"Received response: {response}") return response + def pre_delete_channel(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_channel(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_channel_connection(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_channel_connection(self, response): + logging.log(f"Received response: {response}") + return response + def pre_delete_trigger(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -88,6 +125,38 @@ def post_delete_trigger(self, response): logging.log(f"Received response: {response}") return response + def pre_get_channel(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_channel(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_channel_connection(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_channel_connection(self, response): + logging.log(f"Received response: {response}") + return response + + def 
pre_get_google_channel_config(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_google_channel_config(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_provider(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_provider(self, response): + logging.log(f"Received response: {response}") + return response + def pre_get_trigger(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -96,6 +165,30 @@ def post_get_trigger(self, response): logging.log(f"Received response: {response}") return response + def pre_list_channel_connections(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_channel_connections(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_channels(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_channels(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_providers(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_providers(self, response): + logging.log(f"Received response: {response}") + return response + def pre_list_triggers(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -104,6 +197,22 @@ def post_list_triggers(self, response): logging.log(f"Received response: {response}") return response + def pre_update_channel(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_channel(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_google_channel_config(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_google_channel_config(self, response): + logging.log(f"Received response: {response}") + return response + def pre_update_trigger(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -117,6 +226,38 @@ def post_update_trigger(self, response): """ + def pre_create_channel(self, request: eventarc.CreateChannelRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[eventarc.CreateChannelRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_channel + + Override in a subclass to manipulate the request or metadata + before they are sent to the Eventarc server. + """ + return request, metadata + + def post_create_channel(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for create_channel + + Override in a subclass to manipulate the response + after it is returned by the Eventarc server but before + it is returned to user code. + """ + return response + def pre_create_channel_connection(self, request: eventarc.CreateChannelConnectionRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[eventarc.CreateChannelConnectionRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_channel_connection + + Override in a subclass to manipulate the request or metadata + before they are sent to the Eventarc server. 
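+
+        For example (an illustrative override; the header name is an
+        assumption):
+
+        .. code-block:: python
+
+            def pre_create_channel_connection(self, request, metadata):
+                # Attach an extra header before the request goes out.
+                return request, list(metadata) + [("x-goog-example", "1")]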
+ """ + return request, metadata + + def post_create_channel_connection(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for create_channel_connection + + Override in a subclass to manipulate the response + after it is returned by the Eventarc server but before + it is returned to user code. + """ + return response def pre_create_trigger(self, request: eventarc.CreateTriggerRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[eventarc.CreateTriggerRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for create_trigger @@ -128,6 +269,38 @@ def pre_create_trigger(self, request: eventarc.CreateTriggerRequest, metadata: S def post_create_trigger(self, response: operations_pb2.Operation) -> operations_pb2.Operation: """Post-rpc interceptor for create_trigger + Override in a subclass to manipulate the response + after it is returned by the Eventarc server but before + it is returned to user code. + """ + return response + def pre_delete_channel(self, request: eventarc.DeleteChannelRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[eventarc.DeleteChannelRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_channel + + Override in a subclass to manipulate the request or metadata + before they are sent to the Eventarc server. + """ + return request, metadata + + def post_delete_channel(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_channel + + Override in a subclass to manipulate the response + after it is returned by the Eventarc server but before + it is returned to user code. + """ + return response + def pre_delete_channel_connection(self, request: eventarc.DeleteChannelConnectionRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[eventarc.DeleteChannelConnectionRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_channel_connection + + Override in a subclass to manipulate the request or metadata + before they are sent to the Eventarc server. + """ + return request, metadata + + def post_delete_channel_connection(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_channel_connection + Override in a subclass to manipulate the response after it is returned by the Eventarc server but before it is returned to user code. @@ -144,6 +317,70 @@ def pre_delete_trigger(self, request: eventarc.DeleteTriggerRequest, metadata: S def post_delete_trigger(self, response: operations_pb2.Operation) -> operations_pb2.Operation: """Post-rpc interceptor for delete_trigger + Override in a subclass to manipulate the response + after it is returned by the Eventarc server but before + it is returned to user code. + """ + return response + def pre_get_channel(self, request: eventarc.GetChannelRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[eventarc.GetChannelRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_channel + + Override in a subclass to manipulate the request or metadata + before they are sent to the Eventarc server. + """ + return request, metadata + + def post_get_channel(self, response: channel.Channel) -> channel.Channel: + """Post-rpc interceptor for get_channel + + Override in a subclass to manipulate the response + after it is returned by the Eventarc server but before + it is returned to user code. 
+ """ + return response + def pre_get_channel_connection(self, request: eventarc.GetChannelConnectionRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[eventarc.GetChannelConnectionRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_channel_connection + + Override in a subclass to manipulate the request or metadata + before they are sent to the Eventarc server. + """ + return request, metadata + + def post_get_channel_connection(self, response: channel_connection.ChannelConnection) -> channel_connection.ChannelConnection: + """Post-rpc interceptor for get_channel_connection + + Override in a subclass to manipulate the response + after it is returned by the Eventarc server but before + it is returned to user code. + """ + return response + def pre_get_google_channel_config(self, request: eventarc.GetGoogleChannelConfigRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[eventarc.GetGoogleChannelConfigRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_google_channel_config + + Override in a subclass to manipulate the request or metadata + before they are sent to the Eventarc server. + """ + return request, metadata + + def post_get_google_channel_config(self, response: google_channel_config.GoogleChannelConfig) -> google_channel_config.GoogleChannelConfig: + """Post-rpc interceptor for get_google_channel_config + + Override in a subclass to manipulate the response + after it is returned by the Eventarc server but before + it is returned to user code. + """ + return response + def pre_get_provider(self, request: eventarc.GetProviderRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[eventarc.GetProviderRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_provider + + Override in a subclass to manipulate the request or metadata + before they are sent to the Eventarc server. + """ + return request, metadata + + def post_get_provider(self, response: discovery.Provider) -> discovery.Provider: + """Post-rpc interceptor for get_provider + Override in a subclass to manipulate the response after it is returned by the Eventarc server but before it is returned to user code. @@ -160,6 +397,54 @@ def pre_get_trigger(self, request: eventarc.GetTriggerRequest, metadata: Sequenc def post_get_trigger(self, response: trigger.Trigger) -> trigger.Trigger: """Post-rpc interceptor for get_trigger + Override in a subclass to manipulate the response + after it is returned by the Eventarc server but before + it is returned to user code. + """ + return response + def pre_list_channel_connections(self, request: eventarc.ListChannelConnectionsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[eventarc.ListChannelConnectionsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_channel_connections + + Override in a subclass to manipulate the request or metadata + before they are sent to the Eventarc server. + """ + return request, metadata + + def post_list_channel_connections(self, response: eventarc.ListChannelConnectionsResponse) -> eventarc.ListChannelConnectionsResponse: + """Post-rpc interceptor for list_channel_connections + + Override in a subclass to manipulate the response + after it is returned by the Eventarc server but before + it is returned to user code. 
+ """ + return response + def pre_list_channels(self, request: eventarc.ListChannelsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[eventarc.ListChannelsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_channels + + Override in a subclass to manipulate the request or metadata + before they are sent to the Eventarc server. + """ + return request, metadata + + def post_list_channels(self, response: eventarc.ListChannelsResponse) -> eventarc.ListChannelsResponse: + """Post-rpc interceptor for list_channels + + Override in a subclass to manipulate the response + after it is returned by the Eventarc server but before + it is returned to user code. + """ + return response + def pre_list_providers(self, request: eventarc.ListProvidersRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[eventarc.ListProvidersRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_providers + + Override in a subclass to manipulate the request or metadata + before they are sent to the Eventarc server. + """ + return request, metadata + + def post_list_providers(self, response: eventarc.ListProvidersResponse) -> eventarc.ListProvidersResponse: + """Post-rpc interceptor for list_providers + Override in a subclass to manipulate the response after it is returned by the Eventarc server but before it is returned to user code. @@ -176,6 +461,38 @@ def pre_list_triggers(self, request: eventarc.ListTriggersRequest, metadata: Seq def post_list_triggers(self, response: eventarc.ListTriggersResponse) -> eventarc.ListTriggersResponse: """Post-rpc interceptor for list_triggers + Override in a subclass to manipulate the response + after it is returned by the Eventarc server but before + it is returned to user code. + """ + return response + def pre_update_channel(self, request: eventarc.UpdateChannelRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[eventarc.UpdateChannelRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_channel + + Override in a subclass to manipulate the request or metadata + before they are sent to the Eventarc server. + """ + return request, metadata + + def post_update_channel(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for update_channel + + Override in a subclass to manipulate the response + after it is returned by the Eventarc server but before + it is returned to user code. + """ + return response + def pre_update_google_channel_config(self, request: eventarc.UpdateGoogleChannelConfigRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[eventarc.UpdateGoogleChannelConfigRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_google_channel_config + + Override in a subclass to manipulate the request or metadata + before they are sent to the Eventarc server. + """ + return request, metadata + + def post_update_google_channel_config(self, response: gce_google_channel_config.GoogleChannelConfig) -> gce_google_channel_config.GoogleChannelConfig: + """Post-rpc interceptor for update_google_channel_config + Override in a subclass to manipulate the response after it is returned by the Eventarc server but before it is returned to user code. 
@@ -198,23 +515,204 @@ def post_update_trigger(self, response: operations_pb2.Operation) -> operations_ """ return response + def pre_get_location( + self, request: locations_pb2.GetLocationRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_location -@dataclasses.dataclass -class EventarcRestStub: - _session: AuthorizedSession - _host: str - _interceptor: EventarcRestInterceptor - + Override in a subclass to manipulate the request or metadata + before they are sent to the Eventarc server. + """ + return request, metadata -class EventarcRestTransport(EventarcTransport): - """REST backend transport for Eventarc. + def post_get_location( + self, response: locations_pb2.Location + ) -> locations_pb2.Location: + """Post-rpc interceptor for get_location - Eventarc allows users to subscribe to various events that are - provided by Google Cloud services and forward them to supported - destinations. + Override in a subclass to manipulate the response + after it is returned by the Eventarc server but before + it is returned to user code. + """ + return response + def pre_list_locations( + self, request: locations_pb2.ListLocationsRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_locations - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation + Override in a subclass to manipulate the request or metadata + before they are sent to the Eventarc server. + """ + return request, metadata + + def post_list_locations( + self, response: locations_pb2.ListLocationsResponse + ) -> locations_pb2.ListLocationsResponse: + """Post-rpc interceptor for list_locations + + Override in a subclass to manipulate the response + after it is returned by the Eventarc server but before + it is returned to user code. + """ + return response + def pre_get_iam_policy( + self, request: iam_policy_pb2.GetIamPolicyRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[iam_policy_pb2.GetIamPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the Eventarc server. + """ + return request, metadata + + def post_get_iam_policy( + self, response: policy_pb2.Policy + ) -> policy_pb2.Policy: + """Post-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the Eventarc server but before + it is returned to user code. + """ + return response + def pre_set_iam_policy( + self, request: iam_policy_pb2.SetIamPolicyRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[iam_policy_pb2.SetIamPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the Eventarc server. + """ + return request, metadata + + def post_set_iam_policy( + self, response: policy_pb2.Policy + ) -> policy_pb2.Policy: + """Post-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the Eventarc server but before + it is returned to user code. 
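+
+        For example (illustrative only; ``etag`` is the concurrency token
+        carried by ``Policy``):
+
+        .. code-block:: python
+
+            def post_set_iam_policy(self, response):
+                logging.info("policy etag is now %r", response.etag)
+                return response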
+ """ + return response + def pre_test_iam_permissions( + self, request: iam_policy_pb2.TestIamPermissionsRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[iam_policy_pb2.TestIamPermissionsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the request or metadata + before they are sent to the Eventarc server. + """ + return request, metadata + + def post_test_iam_permissions( + self, response: iam_policy_pb2.TestIamPermissionsResponse + ) -> iam_policy_pb2.TestIamPermissionsResponse: + """Post-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the response + after it is returned by the Eventarc server but before + it is returned to user code. + """ + return response + def pre_cancel_operation( + self, request: operations_pb2.CancelOperationRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the Eventarc server. + """ + return request, metadata + + def post_cancel_operation( + self, response: None + ) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the Eventarc server but before + it is returned to user code. + """ + return response + def pre_delete_operation( + self, request: operations_pb2.DeleteOperationRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the Eventarc server. + """ + return request, metadata + + def post_delete_operation( + self, response: None + ) -> None: + """Post-rpc interceptor for delete_operation + + Override in a subclass to manipulate the response + after it is returned by the Eventarc server but before + it is returned to user code. + """ + return response + def pre_get_operation( + self, request: operations_pb2.GetOperationRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the Eventarc server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the Eventarc server but before + it is returned to user code. + """ + return response + def pre_list_operations( + self, request: operations_pb2.ListOperationsRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the Eventarc server. + """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the Eventarc server but before + it is returned to user code. 
+ """ + return response + + +@dataclasses.dataclass +class EventarcRestStub: + _session: AuthorizedSession + _host: str + _interceptor: EventarcRestInterceptor + + +class EventarcRestTransport(EventarcTransport): + """REST backend transport for Eventarc. + + Eventarc allows users to subscribe to various events that are + provided by Google Cloud services and forward them to supported + destinations. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation and call it. It sends JSON representations of protocol buffers over HTTP/1.1 @@ -311,6 +809,31 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: # Only create a new client if we do not already have one. if self._operations_client is None: http_options: Dict[str, List[Dict[str, str]]] = { + 'google.longrunning.Operations.CancelOperation': [ + { + 'method': 'post', + 'uri': '/v1/{name=projects/*/locations/*/operations/*}:cancel', + 'body': '*', + }, + ], + 'google.longrunning.Operations.DeleteOperation': [ + { + 'method': 'delete', + 'uri': '/v1/{name=projects/*/locations/*/operations/*}', + }, + ], + 'google.longrunning.Operations.GetOperation': [ + { + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*/operations/*}', + }, + ], + 'google.longrunning.Operations.ListOperations': [ + { + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*}/operations', + }, + ], } rest_transport = operations_v1.OperationsRestTransport( @@ -326,29 +849,29 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: # Return the client from cache. return self._operations_client - class _CreateTrigger(EventarcRestStub): + class _CreateChannel(EventarcRestStub): def __hash__(self): - return hash("CreateTrigger") + return hash("CreateChannel") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "triggerId" : "", "validateOnly" : False, } + "channelId" : "", "validateOnly" : False, } @classmethod def _get_unset_required_fields(cls, message_dict): return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} def __call__(self, - request: eventarc.CreateTriggerRequest, *, + request: eventarc.CreateChannelRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, metadata: Sequence[Tuple[str, str]]=(), ) -> operations_pb2.Operation: - r"""Call the create trigger method over HTTP. + r"""Call the create channel method over HTTP. Args: - request (~.eventarc.CreateTriggerRequest): + request (~.eventarc.CreateChannelRequest): The request object. The request message for the - CreateTrigger method. + CreateChannel method. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
@@ -365,12 +888,99 @@ def __call__(self, http_options: List[Dict[str, str]] = [{ 'method': 'post', - 'uri': '/v1/{parent=projects/*/locations/*}/triggers', - 'body': 'trigger', + 'uri': '/v1/{parent=projects/*/locations/*}/channels', + 'body': 'channel', }, ] - request, metadata = self._interceptor.pre_create_trigger(request, metadata) - pb_request = eventarc.CreateTriggerRequest.pb(request) + request, metadata = self._interceptor.pre_create_channel(request, metadata) + pb_request = eventarc.CreateChannelRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_channel(resp) + return resp + + class _CreateChannelConnection(EventarcRestStub): + def __hash__(self): + return hash("CreateChannelConnection") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "channelConnectionId" : "", } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: eventarc.CreateChannelConnectionRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> operations_pb2.Operation: + r"""Call the create channel connection method over HTTP. + + Args: + request (~.eventarc.CreateChannelConnectionRequest): + The request object. The request message for the + CreateChannelConnection method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{parent=projects/*/locations/*}/channelConnections', + 'body': 'channel_connection', + }, + ] + request, metadata = self._interceptor.pre_create_channel_connection(request, metadata) + pb_request = eventarc.CreateChannelConnectionRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) # Jsonify the request body @@ -398,201 +1008,1974 @@ def __call__(self, "{host}{uri}".format(host=self._host, uri=uri), timeout=timeout, headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_channel_connection(resp) + return resp + + class _CreateTrigger(EventarcRestStub): + def __hash__(self): + return hash("CreateTrigger") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "triggerId" : "", "validateOnly" : False, } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: eventarc.CreateTriggerRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> operations_pb2.Operation: + r"""Call the create trigger method over HTTP. + + Args: + request (~.eventarc.CreateTriggerRequest): + The request object. The request message for the + CreateTrigger method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{parent=projects/*/locations/*}/triggers', + 'body': 'trigger', + }, + ] + request, metadata = self._interceptor.pre_create_trigger(request, metadata) + pb_request = eventarc.CreateTriggerRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_trigger(resp) + return resp + + class _DeleteChannel(EventarcRestStub): + def __hash__(self): + return hash("DeleteChannel") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "validateOnly" : False, } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: eventarc.DeleteChannelRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> operations_pb2.Operation: + r"""Call the delete channel method over HTTP. + + Args: + request (~.eventarc.DeleteChannelRequest): + The request object. The request message for the + DeleteChannel method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/v1/{name=projects/*/locations/*/channels/*}', + }, + ] + request, metadata = self._interceptor.pre_delete_channel(request, metadata) + pb_request = eventarc.DeleteChannelRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_channel(resp) + return resp + + class _DeleteChannelConnection(EventarcRestStub): + def __hash__(self): + return hash("DeleteChannelConnection") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: eventarc.DeleteChannelConnectionRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> operations_pb2.Operation: + r"""Call the delete channel connection method over HTTP. + + Args: + request (~.eventarc.DeleteChannelConnectionRequest): + The request object. The request message for the + DeleteChannelConnection method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/v1/{name=projects/*/locations/*/channelConnections/*}', + }, + ] + request, metadata = self._interceptor.pre_delete_channel_connection(request, metadata) + pb_request = eventarc.DeleteChannelConnectionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_channel_connection(resp) + return resp + + class _DeleteTrigger(EventarcRestStub): + def __hash__(self): + return hash("DeleteTrigger") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "validateOnly" : False, } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: eventarc.DeleteTriggerRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> operations_pb2.Operation: + r"""Call the delete trigger method over HTTP. + + Args: + request (~.eventarc.DeleteTriggerRequest): + The request object. The request message for the + DeleteTrigger method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/v1/{name=projects/*/locations/*/triggers/*}', + }, + ] + request, metadata = self._interceptor.pre_delete_trigger(request, metadata) + pb_request = eventarc.DeleteTriggerRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_trigger(resp) + return resp + + class _GetChannel(EventarcRestStub): + def __hash__(self): + return hash("GetChannel") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: eventarc.GetChannelRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> channel.Channel: + r"""Call the get channel method over HTTP. + + Args: + request (~.eventarc.GetChannelRequest): + The request object. The request message for the + GetChannel method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.channel.Channel: + A representation of the Channel + resource. A Channel is a resource on + which event providers publish their + events. The published events are + delivered through the transport + associated with the channel. Note that a + channel is associated with exactly one + event provider. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*/channels/*}', + }, + ] + request, metadata = self._interceptor.pre_get_channel(request, metadata) + pb_request = eventarc.GetChannelRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = channel.Channel() + pb_resp = channel.Channel.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_channel(resp) + return resp + + class _GetChannelConnection(EventarcRestStub): + def __hash__(self): + return hash("GetChannelConnection") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: eventarc.GetChannelConnectionRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> channel_connection.ChannelConnection: + r"""Call the get channel connection method over HTTP. + + Args: + request (~.eventarc.GetChannelConnectionRequest): + The request object. The request message for the + GetChannelConnection method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.channel_connection.ChannelConnection: + A representation of the + ChannelConnection resource. A + ChannelConnection is a resource which + event providers create during the + activation process to establish a + connection between the provider and the + subscriber channel. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*/channelConnections/*}', + }, + ] + request, metadata = self._interceptor.pre_get_channel_connection(request, metadata) + pb_request = eventarc.GetChannelConnectionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = channel_connection.ChannelConnection() + pb_resp = channel_connection.ChannelConnection.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_channel_connection(resp) + return resp + + class _GetGoogleChannelConfig(EventarcRestStub): + def __hash__(self): + return hash("GetGoogleChannelConfig") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: eventarc.GetGoogleChannelConfigRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> google_channel_config.GoogleChannelConfig: + r"""Call the get google channel config method over HTTP. + + Args: + request (~.eventarc.GetGoogleChannelConfigRequest): + The request object. The request message for the + GetGoogleChannelConfig method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.google_channel_config.GoogleChannelConfig: + A GoogleChannelConfig is a resource + that stores the custom settings + respected by Eventarc first-party + triggers in the matching region. Once + configured, first-party event data will + be protected using the specified custom + managed encryption key instead of + Google-managed encryption keys. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*/googleChannelConfig}', + }, + ] + request, metadata = self._interceptor.pre_get_google_channel_config(request, metadata) + pb_request = eventarc.GetGoogleChannelConfigRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = google_channel_config.GoogleChannelConfig() + pb_resp = google_channel_config.GoogleChannelConfig.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_google_channel_config(resp) + return resp + + class _GetProvider(EventarcRestStub): + def __hash__(self): + return hash("GetProvider") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: eventarc.GetProviderRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> discovery.Provider: + r"""Call the get provider method over HTTP. + + Args: + request (~.eventarc.GetProviderRequest): + The request object. The request message for the + GetProvider method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.discovery.Provider: + A representation of the Provider + resource. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*/providers/*}', + }, + ] + request, metadata = self._interceptor.pre_get_provider(request, metadata) + pb_request = eventarc.GetProviderRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = discovery.Provider() + pb_resp = discovery.Provider.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_provider(resp) + return resp + + class _GetTrigger(EventarcRestStub): + def __hash__(self): + return hash("GetTrigger") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: eventarc.GetTriggerRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> trigger.Trigger: + r"""Call the get trigger method over HTTP. + + Args: + request (~.eventarc.GetTriggerRequest): + The request object. The request message for the + GetTrigger method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.trigger.Trigger: + A representation of the trigger + resource. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*/triggers/*}', + }, + ] + request, metadata = self._interceptor.pre_get_trigger(request, metadata) + pb_request = eventarc.GetTriggerRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
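
Several of the stubs above pin required query parameters to their proto3 defaults when the caller leaves them unset (for example ``validateOnly`` on the delete and update methods). A minimal standalone sketch of what that classmethod computes, mirroring the dictionary shape the generated code passes in:

    # Sketch of the generated _get_unset_required_fields logic;
    # "validateOnly" mirrors the default pinned by _DeleteTrigger above.
    REQUIRED_FIELDS_DEFAULT_VALUES = {"validateOnly": False}

    def get_unset_required_fields(message_dict):
        # Keep a default only when the serialized request left the key out,
        # so explicitly-set values are never overwritten.
        return {k: v for k, v in REQUIRED_FIELDS_DEFAULT_VALUES.items()
                if k not in message_dict}

    assert get_unset_required_fields({}) == {"validateOnly": False}
    assert get_unset_required_fields({"validateOnly": True}) == {}
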
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = trigger.Trigger() + pb_resp = trigger.Trigger.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_trigger(resp) + return resp + + class _ListChannelConnections(EventarcRestStub): + def __hash__(self): + return hash("ListChannelConnections") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: eventarc.ListChannelConnectionsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> eventarc.ListChannelConnectionsResponse: + r"""Call the list channel connections method over HTTP. + + Args: + request (~.eventarc.ListChannelConnectionsRequest): + The request object. The request message for the + ListChannelConnections method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.eventarc.ListChannelConnectionsResponse: + The response message for the ``ListChannelConnections`` + method. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{parent=projects/*/locations/*}/channelConnections', + }, + ] + request, metadata = self._interceptor.pre_list_channel_connections(request, metadata) + pb_request = eventarc.ListChannelConnectionsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
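
To make the transcoding step concrete: for ListChannelConnections, the ``parent`` field is consumed by the URI template while unmatched fields fall through to the query string. A sketch with a hypothetical project and location (the dict layout follows the documented contract of google.api_core.path_template.transcode):

    from google.api_core import path_template
    from google.cloud.eventarc_v1.types import eventarc

    request = eventarc.ListChannelConnectionsRequest(
        parent="projects/my-project/locations/us-central1",
        page_size=10,
    )
    transcoded = path_template.transcode(
        [{'method': 'get',
          'uri': '/v1/{parent=projects/*/locations/*}/channelConnections'}],
        eventarc.ListChannelConnectionsRequest.pb(request),
    )
    # transcoded['uri'] ->
    #     '/v1/projects/my-project/locations/us-central1/channelConnections'
    # transcoded['method'] -> 'get'
    # page_size is not part of the template, so it remains in
    # transcoded['query_params'] and is serialized as ?pageSize=10.
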
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = eventarc.ListChannelConnectionsResponse() + pb_resp = eventarc.ListChannelConnectionsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_channel_connections(resp) + return resp + + class _ListChannels(EventarcRestStub): + def __hash__(self): + return hash("ListChannels") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: eventarc.ListChannelsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> eventarc.ListChannelsResponse: + r"""Call the list channels method over HTTP. + + Args: + request (~.eventarc.ListChannelsRequest): + The request object. The request message for the + ListChannels method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.eventarc.ListChannelsResponse: + The response message for the ``ListChannels`` method. + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{parent=projects/*/locations/*}/channels', + }, + ] + request, metadata = self._interceptor.pre_list_channels(request, metadata) + pb_request = eventarc.ListChannelsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = eventarc.ListChannelsResponse() + pb_resp = eventarc.ListChannelsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_channels(resp) + return resp + + class _ListProviders(EventarcRestStub): + def __hash__(self): + return hash("ListProviders") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: eventarc.ListProvidersRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> eventarc.ListProvidersResponse: + r"""Call the list providers method over HTTP. 
+ + Args: + request (~.eventarc.ListProvidersRequest): + The request object. The request message for the + ListProviders method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.eventarc.ListProvidersResponse: + The response message for the ``ListProviders`` method. + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{parent=projects/*/locations/*}/providers', + }, + ] + request, metadata = self._interceptor.pre_list_providers(request, metadata) + pb_request = eventarc.ListProvidersRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = eventarc.ListProvidersResponse() + pb_resp = eventarc.ListProvidersResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_providers(resp) + return resp + + class _ListTriggers(EventarcRestStub): + def __hash__(self): + return hash("ListTriggers") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: eventarc.ListTriggersRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> eventarc.ListTriggersResponse: + r"""Call the list triggers method over HTTP. + + Args: + request (~.eventarc.ListTriggersRequest): + The request object. The request message for the + ListTriggers method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.eventarc.ListTriggersResponse: + The response message for the ``ListTriggers`` method. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{parent=projects/*/locations/*}/triggers', + }, + ] + request, metadata = self._interceptor.pre_list_triggers(request, metadata) + pb_request = eventarc.ListTriggersRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = eventarc.ListTriggersResponse() + pb_resp = eventarc.ListTriggersResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_triggers(resp) + return resp + + class _UpdateChannel(EventarcRestStub): + def __hash__(self): + return hash("UpdateChannel") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "validateOnly" : False, } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: eventarc.UpdateChannelRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> operations_pb2.Operation: + r"""Call the update channel method over HTTP. + + Args: + request (~.eventarc.UpdateChannelRequest): + The request object. The request message for the + UpdateChannel method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/v1/{channel.name=projects/*/locations/*/channels/*}', + 'body': 'channel', + }, + ] + request, metadata = self._interceptor.pre_update_channel(request, metadata) + pb_request = eventarc.UpdateChannelRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_channel(resp) + return resp + + class _UpdateGoogleChannelConfig(EventarcRestStub): + def __hash__(self): + return hash("UpdateGoogleChannelConfig") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: eventarc.UpdateGoogleChannelConfigRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> gce_google_channel_config.GoogleChannelConfig: + r"""Call the update google channel + config method over HTTP. + + Args: + request (~.eventarc.UpdateGoogleChannelConfigRequest): + The request object. The request message for the + UpdateGoogleChannelConfig method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.gce_google_channel_config.GoogleChannelConfig: + A GoogleChannelConfig is a resource + that stores the custom settings + respected by Eventarc first-party + triggers in the matching region. Once + configured, first-party event data will + be protected using the specified custom + managed encryption key instead of + Google-managed encryption keys. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/v1/{google_channel_config.name=projects/*/locations/*/googleChannelConfig}', + 'body': 'google_channel_config', + }, + ] + request, metadata = self._interceptor.pre_update_google_channel_config(request, metadata) + pb_request = eventarc.UpdateGoogleChannelConfigRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = gce_google_channel_config.GoogleChannelConfig() + pb_resp = gce_google_channel_config.GoogleChannelConfig.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_google_channel_config(resp) + return resp + + class _UpdateTrigger(EventarcRestStub): + def __hash__(self): + return hash("UpdateTrigger") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "validateOnly" : False, } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: eventarc.UpdateTriggerRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> operations_pb2.Operation: + r"""Call the update trigger method over HTTP. + + Args: + request (~.eventarc.UpdateTriggerRequest): + The request object. The request message for the + UpdateTrigger method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/v1/{trigger.name=projects/*/locations/*/triggers/*}', + 'body': 'trigger', + }, + ] + request, metadata = self._interceptor.pre_update_trigger(request, metadata) + pb_request = eventarc.UpdateTriggerRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_trigger(resp) + return resp + + @property + def create_channel_(self) -> Callable[ + [eventarc.CreateChannelRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateChannel(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_channel_connection(self) -> Callable[ + [eventarc.CreateChannelConnectionRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateChannelConnection(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_trigger(self) -> Callable[ + [eventarc.CreateTriggerRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateTrigger(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_channel(self) -> Callable[ + [eventarc.DeleteChannelRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteChannel(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_channel_connection(self) -> Callable[ + [eventarc.DeleteChannelConnectionRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._DeleteChannelConnection(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_trigger(self) -> Callable[ + [eventarc.DeleteTriggerRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteTrigger(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_channel(self) -> Callable[ + [eventarc.GetChannelRequest], + channel.Channel]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetChannel(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_channel_connection(self) -> Callable[ + [eventarc.GetChannelConnectionRequest], + channel_connection.ChannelConnection]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetChannelConnection(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_google_channel_config(self) -> Callable[ + [eventarc.GetGoogleChannelConfigRequest], + google_channel_config.GoogleChannelConfig]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetGoogleChannelConfig(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_provider(self) -> Callable[ + [eventarc.GetProviderRequest], + discovery.Provider]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetProvider(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_trigger(self) -> Callable[ + [eventarc.GetTriggerRequest], + trigger.Trigger]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetTrigger(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_channel_connections(self) -> Callable[ + [eventarc.ListChannelConnectionsRequest], + eventarc.ListChannelConnectionsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListChannelConnections(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_channels(self) -> Callable[ + [eventarc.ListChannelsRequest], + eventarc.ListChannelsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListChannels(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_providers(self) -> Callable[ + [eventarc.ListProvidersRequest], + eventarc.ListProvidersResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ListProviders(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_triggers(self) -> Callable[ + [eventarc.ListTriggersRequest], + eventarc.ListTriggersResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListTriggers(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_channel(self) -> Callable[ + [eventarc.UpdateChannelRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateChannel(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_google_channel_config(self) -> Callable[ + [eventarc.UpdateGoogleChannelConfigRequest], + gce_google_channel_config.GoogleChannelConfig]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateGoogleChannelConfig(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_trigger(self) -> Callable[ + [eventarc.UpdateTriggerRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateTrigger(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_location(self): + return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore + + class _GetLocation(EventarcRestStub): + def __call__(self, + request: locations_pb2.GetLocationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> locations_pb2.Location: + + r"""Call the get location method over HTTP. + + Args: + request (locations_pb2.GetLocationRequest): + The request object for GetLocation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + locations_pb2.Location: Response from GetLocation method. + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*}', + }, + ] + + request, metadata = self._interceptor.pre_get_location(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
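
Note the contrast with the GAPIC-method stubs earlier in the file: the mixin stubs such as _GetLocation transcode from a plain dict (via json_format.MessageToDict) and call flatten_query_params without strict=True. Roughly, the flag controls value canonicalization; a sketch assuming google.api_core behavior:

    from google.api_core import rest_helpers

    params = {"filter": "display_name=foo", "validateOnly": True}
    # Without strict, leaf values keep their Python types and the HTTP
    # library decides how to encode them.
    rest_helpers.flatten_query_params(params)
    # With strict=True (used by the generated method stubs), scalars are
    # canonicalized to strings, e.g. True -> 'true', matching protobuf
    # JSON encoding on the wire.
    rest_helpers.flatten_query_params(params, strict=True)
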
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = locations_pb2.Location() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_location(resp) + return resp + + @property + def list_locations(self): + return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore + + class _ListLocations(EventarcRestStub): + def __call__(self, + request: locations_pb2.ListLocationsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> locations_pb2.ListLocationsResponse: + + r"""Call the list locations method over HTTP. + + Args: + request (locations_pb2.ListLocationsRequest): + The request object for ListLocations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + locations_pb2.ListLocationsResponse: Response from ListLocations method. + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*}/locations', + }, + ] + + request, metadata = self._interceptor.pre_list_locations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = locations_pb2.ListLocationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_locations(resp) + return resp + + @property + def get_iam_policy(self): + return self._GetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + class _GetIamPolicy(EventarcRestStub): + def __call__(self, + request: iam_policy_pb2.GetIamPolicyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> policy_pb2.Policy: + + r"""Call the get iam policy method over HTTP. + + Args: + request (iam_policy_pb2.GetIamPolicyRequest): + The request object for GetIamPolicy method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + policy_pb2.Policy: Response from GetIamPolicy method. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{resource=projects/*/locations/*/triggers/*}:getIamPolicy', + }, +{ + 'method': 'get', + 'uri': '/v1/{resource=projects/*/locations/*/channels/*}:getIamPolicy', + }, +{ + 'method': 'get', + 'uri': '/v1/{resource=projects/*/locations/*/channelConnections/*}:getIamPolicy', + }, + ] + + request, metadata = self._interceptor.pre_get_iam_policy(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = policy_pb2.Policy() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_iam_policy(resp) + return resp + + @property + def set_iam_policy(self): + return self._SetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + class _SetIamPolicy(EventarcRestStub): + def __call__(self, + request: iam_policy_pb2.SetIamPolicyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> policy_pb2.Policy: + + r"""Call the set iam policy method over HTTP. + + Args: + request (iam_policy_pb2.SetIamPolicyRequest): + The request object for SetIamPolicy method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + policy_pb2.Policy: Response from SetIamPolicy method. + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{resource=projects/*/locations/*/triggers/*}:setIamPolicy', + 'body': '*', + }, +{ + 'method': 'post', + 'uri': '/v1/{resource=projects/*/locations/*/channels/*}:setIamPolicy', + 'body': '*', + }, +{ + 'method': 'post', + 'uri': '/v1/{resource=projects/*/locations/*/channelConnections/*}:setIamPolicy', + 'body': '*', + }, + ] + + request, metadata = self._interceptor.pre_set_iam_policy(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + + body = json.dumps(transcoded_request['body']) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
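
The IAM stubs carry one binding per resource shape (triggers, channels, channelConnections); transcode walks the rules in order and uses the first URI template the resource name matches. A sketch with a hypothetical channel resource:

    from google.api_core import path_template

    options = [
        {'method': 'post',
         'uri': '/v1/{resource=projects/*/locations/*/triggers/*}:setIamPolicy',
         'body': '*'},
        {'method': 'post',
         'uri': '/v1/{resource=projects/*/locations/*/channels/*}:setIamPolicy',
         'body': '*'},
    ]
    transcoded = path_template.transcode(
        options,
        resource="projects/my-project/locations/us-central1/channels/my-channel",
    )
    # The triggers/* template does not match this name, so the channels/*
    # rule is selected:
    # transcoded['uri'] ->
    #     '/v1/projects/my-project/locations/us-central1/channels/my-channel:setIamPolicy'
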
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = policy_pb2.Policy() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_set_iam_policy(resp) + return resp + + @property + def test_iam_permissions(self): + return self._TestIamPermissions(self._session, self._host, self._interceptor) # type: ignore + + class _TestIamPermissions(EventarcRestStub): + def __call__(self, + request: iam_policy_pb2.TestIamPermissionsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + + r"""Call the test iam permissions method over HTTP. + + Args: + request (iam_policy_pb2.TestIamPermissionsRequest): + The request object for TestIamPermissions method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + iam_policy_pb2.TestIamPermissionsResponse: Response from TestIamPermissions method. + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{resource=projects/*/locations/*/triggers/*}:testIamPermissions', + 'body': '*', + }, +{ + 'method': 'post', + 'uri': '/v1/{resource=projects/*/locations/*/channels/*}:testIamPermissions', + 'body': '*', + }, +{ + 'method': 'post', + 'uri': '/v1/{resource=projects/*/locations/*/channelConnections/*}:testIamPermissions', + 'body': '*', + }, + ] + + request, metadata = self._interceptor.pre_test_iam_permissions(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + + body = json.dumps(transcoded_request['body']) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), data=body, - ) + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
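
At the client surface these stubs are reached through EventarcClient constructed with the REST transport this change introduces; a minimal usage sketch for the TestIamPermissions mixin (resource name and permission are hypothetical):

    from google.cloud import eventarc_v1
    from google.iam.v1 import iam_policy_pb2

    client = eventarc_v1.EventarcClient(transport="rest")
    resp = client.test_iam_permissions(
        iam_policy_pb2.TestIamPermissionsRequest(
            resource="projects/my-project/locations/us-central1/triggers/my-trigger",
            permissions=["eventarc.triggers.get"],
        )
    )
    # resp.permissions echoes back the subset the caller actually holds.
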
if response.status_code >= 400: raise core_exceptions.from_http_response(response) - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_create_trigger(resp) + resp = iam_policy_pb2.TestIamPermissionsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_test_iam_permissions(resp) return resp - class _DeleteTrigger(EventarcRestStub): - def __hash__(self): - return hash("DeleteTrigger") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "validateOnly" : False, } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + class _CancelOperation(EventarcRestStub): def __call__(self, - request: eventarc.DeleteTriggerRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> operations_pb2.Operation: - r"""Call the delete trigger method over HTTP. + request: operations_pb2.CancelOperationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> None: + + r"""Call the cancel operation method over HTTP. Args: - request (~.eventarc.DeleteTriggerRequest): - The request object. The request message for the - DeleteTrigger method. + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
- """ http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/triggers/*}', + 'method': 'post', + 'uri': '/v1/{name=projects/*/locations/*/operations/*}:cancel', + 'body': '*', }, ] - request, metadata = self._interceptor.pre_delete_trigger(request, metadata) - pb_request = eventarc.DeleteTriggerRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) + request, metadata = self._interceptor.pre_cancel_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + + body = json.dumps(transcoded_request['body']) uri = transcoded_request['uri'] method = transcoded_request['method'] # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=False, - )) - query_params.update(self._get_unset_required_fields(query_params)) + query_params = json.loads(json.dumps(transcoded_request['query_params'])) # Send the request headers = dict(metadata) headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( "{host}{uri}".format(host=self._host, uri=uri), timeout=timeout, headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. if response.status_code >= 400: raise core_exceptions.from_http_response(response) - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_delete_trigger(resp) - return resp - - class _GetTrigger(EventarcRestStub): - def __hash__(self): - return hash("GetTrigger") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + return self._interceptor.post_cancel_operation(None) - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + @property + def delete_operation(self): + return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore + class _DeleteOperation(EventarcRestStub): def __call__(self, - request: eventarc.GetTriggerRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> trigger.Trigger: - r"""Call the get trigger method over HTTP. + request: operations_pb2.DeleteOperationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> None: + + r"""Call the delete operation method over HTTP. Args: - request (~.eventarc.GetTriggerRequest): - The request object. The request message for the - GetTrigger method. + request (operations_pb2.DeleteOperationRequest): + The request object for DeleteOperation method. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. - - Returns: - ~.trigger.Trigger: - A representation of the trigger - resource. 
- """ http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/triggers/*}', + 'method': 'delete', + 'uri': '/v1/{name=projects/*/locations/*/operations/*}', }, ] - request, metadata = self._interceptor.pre_get_trigger(request, metadata) - pb_request = eventarc.GetTriggerRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) + + request, metadata = self._interceptor.pre_delete_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) uri = transcoded_request['uri'] method = transcoded_request['method'] # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=False, - )) - query_params.update(self._get_unset_required_fields(query_params)) + query_params = json.loads(json.dumps(transcoded_request['query_params'])) # Send the request headers = dict(metadata) headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( "{host}{uri}".format(host=self._host, uri=uri), timeout=timeout, headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. if response.status_code >= 400: raise core_exceptions.from_http_response(response) - # Return the response - resp = trigger.Trigger() - pb_resp = trigger.Trigger.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_trigger(resp) - return resp - - class _ListTriggers(EventarcRestStub): - def __hash__(self): - return hash("ListTriggers") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + return self._interceptor.post_delete_operation(None) - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + class _GetOperation(EventarcRestStub): def __call__(self, - request: eventarc.ListTriggersRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> eventarc.ListTriggersResponse: - r"""Call the list triggers method over HTTP. + request: operations_pb2.GetOperationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> operations_pb2.Operation: + + r"""Call the get operation method over HTTP. Args: - request (~.eventarc.ListTriggersRequest): - The request object. The request message for the - ListTriggers method. + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -600,78 +2983,64 @@ def __call__(self, sent along with the request as metadata. Returns: - ~.eventarc.ListTriggersResponse: - The response message for the - ListTriggers method. - + operations_pb2.Operation: Response from GetOperation method. 
""" http_options: List[Dict[str, str]] = [{ 'method': 'get', - 'uri': '/v1/{parent=projects/*/locations/*}/triggers', + 'uri': '/v1/{name=projects/*/locations/*/operations/*}', }, ] - request, metadata = self._interceptor.pre_list_triggers(request, metadata) - pb_request = eventarc.ListTriggersRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) uri = transcoded_request['uri'] method = transcoded_request['method'] # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=False, - )) - query_params.update(self._get_unset_required_fields(query_params)) + query_params = json.loads(json.dumps(transcoded_request['query_params'])) # Send the request headers = dict(metadata) headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( "{host}{uri}".format(host=self._host, uri=uri), timeout=timeout, headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. if response.status_code >= 400: raise core_exceptions.from_http_response(response) - # Return the response - resp = eventarc.ListTriggersResponse() - pb_resp = eventarc.ListTriggersResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_triggers(resp) + resp = operations_pb2.Operation() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_operation(resp) return resp - class _UpdateTrigger(EventarcRestStub): - def __hash__(self): - return hash("UpdateTrigger") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "validateOnly" : False, } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + class _ListOperations(EventarcRestStub): def __call__(self, - request: eventarc.UpdateTriggerRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> operations_pb2.Operation: - r"""Call the update trigger method over HTTP. + request: operations_pb2.ListOperationsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> operations_pb2.ListOperationsResponse: + + r"""Call the list operations method over HTTP. Args: - request (~.eventarc.UpdateTriggerRequest): - The request object. The request message for the - UpdateTrigger method. + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -679,103 +3048,47 @@ def __call__(self, sent along with the request as metadata. 
Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. - + operations_pb2.ListOperationsResponse: Response from ListOperations method. """ http_options: List[Dict[str, str]] = [{ - 'method': 'patch', - 'uri': '/v1/{trigger.name=projects/*/locations/*/triggers/*}', - 'body': 'trigger', + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*}/operations', }, ] - request, metadata = self._interceptor.pre_update_trigger(request, metadata) - pb_request = eventarc.UpdateTriggerRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - # Jsonify the request body + request, metadata = self._interceptor.pre_list_operations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=False - ) uri = transcoded_request['uri'] method = transcoded_request['method'] # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=False, - )) - query_params.update(self._get_unset_required_fields(query_params)) + query_params = json.loads(json.dumps(transcoded_request['query_params'])) # Send the request headers = dict(metadata) headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( "{host}{uri}".format(host=self._host, uri=uri), timeout=timeout, headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. if response.status_code >= 400: raise core_exceptions.from_http_response(response) - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_update_trigger(resp) + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_operations(resp) return resp - @property - def create_trigger(self) -> Callable[ - [eventarc.CreateTriggerRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CreateTrigger(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_trigger(self) -> Callable[ - [eventarc.DeleteTriggerRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._DeleteTrigger(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_trigger(self) -> Callable[ - [eventarc.GetTriggerRequest], - trigger.Trigger]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._GetTrigger(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_triggers(self) -> Callable[ - [eventarc.ListTriggersRequest], - eventarc.ListTriggersResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListTriggers(self._session, self._host, self._interceptor) # type: ignore - - @property - def update_trigger(self) -> Callable[ - [eventarc.UpdateTriggerRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._UpdateTrigger(self._session, self._host, self._interceptor) # type: ignore - @property def kind(self) -> str: return "rest" diff --git a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/__init__.py b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/__init__.py index 09b1dcca72..66303876a1 100755 --- a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/__init__.py +++ b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/__init__.py @@ -13,36 +13,92 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from .channel import ( + Channel, +) +from .channel_connection import ( + ChannelConnection, +) +from .discovery import ( + EventType, + FilteringAttribute, + Provider, +) from .eventarc import ( + CreateChannelConnectionRequest, + CreateChannelRequest, CreateTriggerRequest, + DeleteChannelConnectionRequest, + DeleteChannelRequest, DeleteTriggerRequest, + GetChannelConnectionRequest, + GetChannelRequest, + GetGoogleChannelConfigRequest, + GetProviderRequest, GetTriggerRequest, + ListChannelConnectionsRequest, + ListChannelConnectionsResponse, + ListChannelsRequest, + ListChannelsResponse, + ListProvidersRequest, + ListProvidersResponse, ListTriggersRequest, ListTriggersResponse, OperationMetadata, + UpdateChannelRequest, + UpdateGoogleChannelConfigRequest, UpdateTriggerRequest, ) +from .google_channel_config import ( + GoogleChannelConfig, +) from .trigger import ( CloudRun, Destination, EventFilter, + GKE, Pubsub, + StateCondition, Transport, Trigger, ) __all__ = ( + 'Channel', + 'ChannelConnection', + 'EventType', + 'FilteringAttribute', + 'Provider', + 'CreateChannelConnectionRequest', + 'CreateChannelRequest', 'CreateTriggerRequest', + 'DeleteChannelConnectionRequest', + 'DeleteChannelRequest', 'DeleteTriggerRequest', + 'GetChannelConnectionRequest', + 'GetChannelRequest', + 'GetGoogleChannelConfigRequest', + 'GetProviderRequest', 'GetTriggerRequest', + 'ListChannelConnectionsRequest', + 'ListChannelConnectionsResponse', + 'ListChannelsRequest', + 'ListChannelsResponse', + 'ListProvidersRequest', + 'ListProvidersResponse', 'ListTriggersRequest', 'ListTriggersResponse', 'OperationMetadata', + 'UpdateChannelRequest', + 'UpdateGoogleChannelConfigRequest', 'UpdateTriggerRequest', + 'GoogleChannelConfig', 'CloudRun', 'Destination', 'EventFilter', + 'GKE', 'Pubsub', + 'StateCondition', 'Transport', 'Trigger', ) diff --git a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/channel.py b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/channel.py new file mode 100755 index 0000000000..e864f65541 --- /dev/null +++ b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/channel.py @@ -0,0 
+1,160 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.protobuf import timestamp_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.cloud.eventarc.v1', + manifest={ + 'Channel', + }, +) + + +class Channel(proto.Message): + r"""A representation of the Channel resource. + A Channel is a resource on which event providers publish their + events. The published events are delivered through the transport + associated with the channel. Note that a channel is associated + with exactly one event provider. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Required. The resource name of the channel. Must be unique + within the location on the project and must be in + ``projects/{project}/locations/{location}/channels/{channel_id}`` + format. + uid (str): + Output only. Server assigned unique + identifier for the channel. The value is a UUID4 + string and guaranteed to remain unchanged until + the resource is deleted. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The creation time. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The last-modified time. + provider (str): + The name of the event provider (e.g. Eventarc SaaS partner) + associated with the channel. This provider will be granted + permissions to publish events to the channel. Format: + ``projects/{project}/locations/{location}/providers/{provider_id}``. + pubsub_topic (str): + Output only. The name of the Pub/Sub topic created and + managed by Eventarc system as a transport for the event + delivery. Format: ``projects/{project}/topics/{topic_id}``. + + This field is a member of `oneof`_ ``transport``. + state (google.cloud.eventarc_v1.types.Channel.State): + Output only. The state of a Channel. + activation_token (str): + Output only. The activation token for the + channel. The token must be used by the provider + to register the channel for publishing. + crypto_key_name (str): + Optional. Resource name of a KMS crypto key (managed by the + user) used to encrypt/decrypt their event data. + + It must match the pattern + ``projects/*/locations/*/keyRings/*/cryptoKeys/*``. + """ + class State(proto.Enum): + r"""State lists all the possible states of a Channel + + Values: + STATE_UNSPECIFIED (0): + Default value. This value is unused. + PENDING (1): + The PENDING state indicates that a Channel + has been created successfully and there is a new + activation token available for the subscriber to + use to convey the Channel to the provider in + order to create a Connection. + ACTIVE (2): + The ACTIVE state indicates that a Channel has + been successfully connected with the event + provider. An ACTIVE Channel is ready to receive + and route events from the event provider. 
+ INACTIVE (3): + The INACTIVE state indicates that the Channel + cannot receive events permanently. There are two + possible cases in which this state can happen: + + 1. The SaaS provider disconnected from this + Channel. + 2. The Channel activation token has expired but + the SaaS provider wasn't connected. + + To re-establish a Connection with a provider, + the subscriber should create a new Channel and + give it to the provider. + """ + STATE_UNSPECIFIED = 0 + PENDING = 1 + ACTIVE = 2 + INACTIVE = 3 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + uid: str = proto.Field( + proto.STRING, + number=2, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=6, + message=timestamp_pb2.Timestamp, + ) + provider: str = proto.Field( + proto.STRING, + number=7, + ) + pubsub_topic: str = proto.Field( + proto.STRING, + number=8, + oneof='transport', + ) + state: State = proto.Field( + proto.ENUM, + number=9, + enum=State, + ) + activation_token: str = proto.Field( + proto.STRING, + number=10, + ) + crypto_key_name: str = proto.Field( + proto.STRING, + number=11, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/channel_connection.py b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/channel_connection.py new file mode 100755 index 0000000000..301d8832c1 --- /dev/null +++ b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/channel_connection.py @@ -0,0 +1,92 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.protobuf import timestamp_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.cloud.eventarc.v1', + manifest={ + 'ChannelConnection', + }, +) + + +class ChannelConnection(proto.Message): + r"""A representation of the ChannelConnection resource. + A ChannelConnection is a resource which event providers create + during the activation process to establish a connection between + the provider and the subscriber channel. + + Attributes: + name (str): + Required. The name of the connection. + uid (str): + Output only. Server assigned ID of the + resource. The server guarantees uniqueness and + immutability until deleted. + channel (str): + Required. The name of the connected subscriber Channel. This + is a weak reference to avoid cross-project and cross-account + references. This must be in + ``projects/{project}/locations/{location}/channels/{channel_id}`` + format. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The creation time. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The last-modified time. + activation_token (str): + Input only. Activation token for the channel.
+ The token will be used during the creation of + ChannelConnection to bind the channel with the + provider project. This field will not be stored + in the provider resource. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + uid: str = proto.Field( + proto.STRING, + number=2, + ) + channel: str = proto.Field( + proto.STRING, + number=5, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=6, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=7, + message=timestamp_pb2.Timestamp, + ) + activation_token: str = proto.Field( + proto.STRING, + number=8, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/discovery.py b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/discovery.py new file mode 100755 index 0000000000..36a152e7df --- /dev/null +++ b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/discovery.py @@ -0,0 +1,149 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + + +__protobuf__ = proto.module( + package='google.cloud.eventarc.v1', + manifest={ + 'Provider', + 'EventType', + 'FilteringAttribute', + }, +) + + +class Provider(proto.Message): + r"""A representation of the Provider resource. + + Attributes: + name (str): + Output only. In + ``projects/{project}/locations/{location}/providers/{provider_id}`` + format. + display_name (str): + Output only. Human friendly name for the + Provider. For example "Cloud Storage". + event_types (MutableSequence[google.cloud.eventarc_v1.types.EventType]): + Output only. Event types for this provider. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + event_types: MutableSequence['EventType'] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message='EventType', + ) + + +class EventType(proto.Message): + r"""A representation of the event type resource. + + Attributes: + type_ (str): + Output only. The full name of the event type + (for example, + "google.cloud.storage.object.v1.finalized"). In + the form of + {provider-specific-prefix}.{resource}.{version}.{verb}. + Types MUST be versioned and event schemas are + guaranteed to remain backward compatible within + one version. Note that event type versions and + API versions do not need to match. + description (str): + Output only. Human friendly description of + what the event type is about. For example + "Bucket created in Cloud Storage". + filtering_attributes (MutableSequence[google.cloud.eventarc_v1.types.FilteringAttribute]): + Output only. Filtering attributes for the + event type. + event_schema_uri (str): + Output only. URI for the event schema. 
+ For example + "https://github.com/googleapis/google-cloudevents/blob/master/proto/google/events/cloud/storage/v1/events.proto". + """ + + type_: str = proto.Field( + proto.STRING, + number=1, + ) + description: str = proto.Field( + proto.STRING, + number=2, + ) + filtering_attributes: MutableSequence['FilteringAttribute'] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message='FilteringAttribute', + ) + event_schema_uri: str = proto.Field( + proto.STRING, + number=4, + ) + + +class FilteringAttribute(proto.Message): + r"""A representation of the FilteringAttribute resource. + Filtering attributes are per event type. + + Attributes: + attribute (str): + Output only. Attribute used for filtering the + event type. + description (str): + Output only. Description of the purpose of + the attribute. + required (bool): + Output only. If true, the triggers for this + provider should always specify a filter on these + attributes. Trigger creation will fail + otherwise. + path_pattern_supported (bool): + Output only. If true, the attribute accepts + matching expressions in the Eventarc PathPattern + format. + """ + + attribute: str = proto.Field( + proto.STRING, + number=1, + ) + description: str = proto.Field( + proto.STRING, + number=2, + ) + required: bool = proto.Field( + proto.BOOL, + number=3, + ) + path_pattern_supported: bool = proto.Field( + proto.BOOL, + number=4, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/eventarc.py b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/eventarc.py index cb07e3c99e..0e737707c8 100755 --- a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/eventarc.py +++ b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/eventarc.py @@ -19,6 +19,10 @@ import proto # type: ignore +from google.cloud.eventarc_v1.types import channel as gce_channel +from google.cloud.eventarc_v1.types import channel_connection as gce_channel_connection +from google.cloud.eventarc_v1.types import discovery +from google.cloud.eventarc_v1.types import google_channel_config as gce_google_channel_config from google.cloud.eventarc_v1.types import trigger as gce_trigger from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore @@ -33,6 +37,22 @@ 'CreateTriggerRequest', 'UpdateTriggerRequest', 'DeleteTriggerRequest', + 'GetChannelRequest', + 'ListChannelsRequest', + 'ListChannelsResponse', + 'CreateChannelRequest', + 'UpdateChannelRequest', + 'DeleteChannelRequest', + 'GetProviderRequest', + 'ListProvidersRequest', + 'ListProvidersResponse', + 'GetChannelConnectionRequest', + 'ListChannelConnectionsRequest', + 'ListChannelConnectionsResponse', + 'CreateChannelConnectionRequest', + 'DeleteChannelConnectionRequest', + 'UpdateGoogleChannelConfigRequest', + 'GetGoogleChannelConfigRequest', 'OperationMetadata', }, ) @@ -61,7 +81,8 @@ class ListTriggersRequest(proto.Message): triggers on. page_size (int): The maximum number of triggers to return on - each page. Note: The service may send fewer. + each page. + Note: The service may send fewer. page_token (str): The page token; provide the value from the ``next_page_token`` field in a previous ``ListTriggers`` @@ -72,10 +93,16 @@ class ListTriggersRequest(proto.Message): token. order_by (str): The sorting order of the resources returned. Value should be - a comma separated list of fields. The default sorting oder + a comma-separated list of fields. 
The default sorting order is ascending. To specify descending order for a field, append a ``desc`` suffix; for example: ``name desc, trigger_id``. + filter (str): + Filter field. Used to filter the Triggers to + be listed. Possible filters are described in + https://google.aip.dev/160. For example, using + "?filter=destination:gke" would list only + Triggers with a gke destination. """ parent: str = proto.Field( @@ -94,19 +121,23 @@ class ListTriggersRequest(proto.Message): proto.STRING, number=4, ) + filter: str = proto.Field( + proto.STRING, + number=5, + ) class ListTriggersResponse(proto.Message): - r"""The response message for the ListTriggers method. + r"""The response message for the ``ListTriggers`` method. Attributes: triggers (MutableSequence[google.cloud.eventarc_v1.types.Trigger]): The requested triggers, up to the number specified in ``page_size``. next_page_token (str): - A page token that can be sent to ListTriggers - to request the next page. If this is empty, then - there are no more pages. + A page token that can be sent to ``ListTriggers`` to request + the next page. If this is empty, then there are no more + pages. unreachable (MutableSequence[str]): Unreachable resources, if any. """ @@ -144,7 +175,7 @@ class CreateTriggerRequest(proto.Message): to the trigger. validate_only (bool): Required. If set, validate the request and - preview the review, but do not actually post it. + preview the review, but do not post it. """ parent: str = proto.Field( @@ -174,8 +205,8 @@ class UpdateTriggerRequest(proto.Message): The trigger to be updated. update_mask (google.protobuf.field_mask_pb2.FieldMask): The fields to be updated; only fields explicitly provided - will be updated. If no field mask is provided, all provided - fields in the request will be updated. To update all fields, + are updated. If no field mask is provided, all provided + fields in the request are updated. To update all fields, provide a field mask of "*". allow_missing (bool): If set to true, and the trigger is not found, a new trigger @@ -183,7 +214,7 @@ class UpdateTriggerRequest(proto.Message): ignored. validate_only (bool): Required. If set, validate the request and - preview the review, but do not actually post it. + preview the review, but do not post it. """ trigger: gce_trigger.Trigger = proto.Field( @@ -223,7 +254,7 @@ class DeleteTriggerRequest(proto.Message): taken on the server. validate_only (bool): Required. If set, validate the request and - preview the review, but do not actually post it. + preview the review, but do not post it. """ name: str = proto.Field( @@ -244,6 +275,454 @@ class DeleteTriggerRequest(proto.Message): ) +class GetChannelRequest(proto.Message): + r"""The request message for the GetChannel method. + + Attributes: + name (str): + Required. The name of the channel to get. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListChannelsRequest(proto.Message): + r"""The request message for the ListChannels method. + + Attributes: + parent (str): + Required. The parent collection to list + channels on. + page_size (int): + The maximum number of channels to return on + each page. + Note: The service may send fewer. + page_token (str): + The page token; provide the value from the + ``next_page_token`` field in a previous ``ListChannels`` + call to retrieve the subsequent page. + + When paginating, all other parameters provided to + ``ListChannels`` must match the call that provided the page + token. 
+ order_by (str): + The sorting order of the resources returned. Value should be + a comma-separated list of fields. The default sorting order + is ascending. To specify descending order for a field, + append a ``desc`` suffix; for example: + ``name desc, channel_id``. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + order_by: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ListChannelsResponse(proto.Message): + r"""The response message for the ``ListChannels`` method. + + Attributes: + channels (MutableSequence[google.cloud.eventarc_v1.types.Channel]): + The requested channels, up to the number specified in + ``page_size``. + next_page_token (str): + A page token that can be sent to ``ListChannels`` to request + the next page. If this is empty, then there are no more + pages. + unreachable (MutableSequence[str]): + Unreachable resources, if any. + """ + + @property + def raw_page(self): + return self + + channels: MutableSequence[gce_channel.Channel] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gce_channel.Channel, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class CreateChannelRequest(proto.Message): + r"""The request message for the CreateChannel method. + + Attributes: + parent (str): + Required. The parent collection in which to + add this channel. + channel (google.cloud.eventarc_v1.types.Channel): + Required. The channel to create. + channel_id (str): + Required. The user-provided ID to be assigned + to the channel. + validate_only (bool): + Required. If set, validate the request and + preview the review, but do not post it. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + channel: gce_channel.Channel = proto.Field( + proto.MESSAGE, + number=2, + message=gce_channel.Channel, + ) + channel_id: str = proto.Field( + proto.STRING, + number=3, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=4, + ) + + +class UpdateChannelRequest(proto.Message): + r"""The request message for the UpdateChannel method. + + Attributes: + channel (google.cloud.eventarc_v1.types.Channel): + The channel to be updated. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + The fields to be updated; only fields explicitly provided + are updated. If no field mask is provided, all provided + fields in the request are updated. To update all fields, + provide a field mask of "*". + validate_only (bool): + Required. If set, validate the request and + preview the review, but do not post it. + """ + + channel: gce_channel.Channel = proto.Field( + proto.MESSAGE, + number=1, + message=gce_channel.Channel, + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=3, + ) + + +class DeleteChannelRequest(proto.Message): + r"""The request message for the DeleteChannel method. + + Attributes: + name (str): + Required. The name of the channel to be + deleted. + validate_only (bool): + Required. If set, validate the request and + preview the review, but do not post it. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=2, + ) + + +class GetProviderRequest(proto.Message): + r"""The request message for the GetProvider method. + + Attributes: + name (str): + Required. The name of the provider to get. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListProvidersRequest(proto.Message): + r"""The request message for the ListProviders method. + + Attributes: + parent (str): + Required. The parent of the provider to get. + page_size (int): + The maximum number of providers to return on + each page. + page_token (str): + The page token; provide the value from the + ``next_page_token`` field in a previous ``ListProviders`` + call to retrieve the subsequent page. + + When paginating, all other parameters provided to + ``ListProviders`` must match the call that provided the page + token. + order_by (str): + The sorting order of the resources returned. Value should be + a comma-separated list of fields. The default sorting oder + is ascending. To specify descending order for a field, + append a ``desc`` suffix; for example: ``name desc, _id``. + filter (str): + The filter field that the list request will + filter on. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + order_by: str = proto.Field( + proto.STRING, + number=4, + ) + filter: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListProvidersResponse(proto.Message): + r"""The response message for the ``ListProviders`` method. + + Attributes: + providers (MutableSequence[google.cloud.eventarc_v1.types.Provider]): + The requested providers, up to the number specified in + ``page_size``. + next_page_token (str): + A page token that can be sent to ``ListProviders`` to + request the next page. If this is empty, then there are no + more pages. + unreachable (MutableSequence[str]): + Unreachable resources, if any. + """ + + @property + def raw_page(self): + return self + + providers: MutableSequence[discovery.Provider] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=discovery.Provider, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class GetChannelConnectionRequest(proto.Message): + r"""The request message for the GetChannelConnection method. + + Attributes: + name (str): + Required. The name of the channel connection + to get. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListChannelConnectionsRequest(proto.Message): + r"""The request message for the ListChannelConnections method. + + Attributes: + parent (str): + Required. The parent collection from which to + list channel connections. + page_size (int): + The maximum number of channel connections to + return on each page. + Note: The service may send fewer responses. + page_token (str): + The page token; provide the value from the + ``next_page_token`` field in a previous + ``ListChannelConnections`` call to retrieve the subsequent + page. + + When paginating, all other parameters provided to + ``ListChannelConnetions`` match the call that provided the + page token. 
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListChannelConnectionsResponse(proto.Message): + r"""The response message for the ``ListChannelConnections`` method. + + Attributes: + channel_connections (MutableSequence[google.cloud.eventarc_v1.types.ChannelConnection]): + The requested channel connections, up to the number + specified in ``page_size``. + next_page_token (str): + A page token that can be sent to ``ListChannelConnections`` + to request the next page. If this is empty, then there are + no more pages. + unreachable (MutableSequence[str]): + Unreachable resources, if any. + """ + + @property + def raw_page(self): + return self + + channel_connections: MutableSequence[gce_channel_connection.ChannelConnection] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gce_channel_connection.ChannelConnection, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class CreateChannelConnectionRequest(proto.Message): + r"""The request message for the CreateChannelConnection method. + + Attributes: + parent (str): + Required. The parent collection in which to + add this channel connection. + channel_connection (google.cloud.eventarc_v1.types.ChannelConnection): + Required. Channel connection to create. + channel_connection_id (str): + Required. The user-provided ID to be assigned + to the channel connection. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + channel_connection: gce_channel_connection.ChannelConnection = proto.Field( + proto.MESSAGE, + number=2, + message=gce_channel_connection.ChannelConnection, + ) + channel_connection_id: str = proto.Field( + proto.STRING, + number=3, + ) + + +class DeleteChannelConnectionRequest(proto.Message): + r"""The request message for the DeleteChannelConnection method. + + Attributes: + name (str): + Required. The name of the channel connection + to delete. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class UpdateGoogleChannelConfigRequest(proto.Message): + r"""The request message for the UpdateGoogleChannelConfig method. + + Attributes: + google_channel_config (google.cloud.eventarc_v1.types.GoogleChannelConfig): + Required. The config to be updated. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + The fields to be updated; only fields explicitly provided + are updated. If no field mask is provided, all provided + fields in the request are updated. To update all fields, + provide a field mask of "*". + """ + + google_channel_config: gce_google_channel_config.GoogleChannelConfig = proto.Field( + proto.MESSAGE, + number=1, + message=gce_google_channel_config.GoogleChannelConfig, + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class GetGoogleChannelConfigRequest(proto.Message): + r"""The request message for the GetGoogleChannelConfig method. + + Attributes: + name (str): + Required. The name of the config to get. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + class OperationMetadata(proto.Message): r"""Represents the metadata of the long-running operation. 
diff --git a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/google_channel_config.py b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/google_channel_config.py new file mode 100755 index 0000000000..291ebd01bd --- /dev/null +++ b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/google_channel_config.py @@ -0,0 +1,70 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.protobuf import timestamp_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.cloud.eventarc.v1', + manifest={ + 'GoogleChannelConfig', + }, +) + + +class GoogleChannelConfig(proto.Message): + r"""A GoogleChannelConfig is a resource that stores the custom + settings respected by Eventarc first-party triggers in the + matching region. Once configured, first-party event data will be + protected using the specified custom managed encryption key + instead of Google-managed encryption keys. + + Attributes: + name (str): + Required. The resource name of the config. Must be in the + format of, + ``projects/{project}/locations/{location}/googleChannelConfig``. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The last-modified time. + crypto_key_name (str): + Optional. Resource name of a KMS crypto key (managed by the + user) used to encrypt/decrypt their event data. + + It must match the pattern + ``projects/*/locations/*/keyRings/*/cryptoKeys/*``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=6, + message=timestamp_pb2.Timestamp, + ) + crypto_key_name: str = proto.Field( + proto.STRING, + number=7, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/trigger.py b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/trigger.py index e7f7acdd0b..86ba6a41ca 100755 --- a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/trigger.py +++ b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/trigger.py @@ -20,6 +20,7 @@ import proto # type: ignore from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import code_pb2 # type: ignore __protobuf__ = proto.module( @@ -27,9 +28,11 @@ manifest={ 'Trigger', 'EventFilter', + 'StateCondition', 'Destination', 'Transport', 'CloudRun', + 'GKE', 'Pubsub', }, ) @@ -41,11 +44,11 @@ class Trigger(proto.Message): Attributes: name (str): Required. The resource name of the trigger. Must be unique - within the location on the project and must be in + within the location of the project and must be in ``projects/{project}/locations/{location}/triggers/{trigger}`` format. uid (str): - Output only. Server assigned unique + Output only. Server-assigned unique identifier for the trigger. 
The value is a UUID4 string and guaranteed to remain unchanged until the resource is deleted. @@ -54,16 +57,16 @@ class Trigger(proto.Message): update_time (google.protobuf.timestamp_pb2.Timestamp): Output only. The last-modified time. event_filters (MutableSequence[google.cloud.eventarc_v1.types.EventFilter]): - Required. null The list of filters that - applies to event attributes. Only events that - match all the provided filters will be sent to + Required. Unordered list. The list of filters + that applies to event attributes. Only events + that match all the provided filters are sent to the destination. service_account (str): Optional. The IAM service account email associated with the trigger. The service account represents the identity of the trigger. - The principal who calls this API must have + The principal who calls this API must have the ``iam.serviceAccounts.actAs`` permission in the service account. See https://cloud.google.com/iam/docs/understanding-service-accounts?hl=en#sa_common @@ -73,26 +76,35 @@ class Trigger(proto.Message): generate identity tokens when invoking the service. See https://cloud.google.com/run/docs/triggering/pubsub-push#create-service-account for information on how to invoke authenticated Cloud Run - services. In order to create Audit Log triggers, the service - account should also have ``roles/eventarc.eventReceiver`` - IAM role. + services. To create Audit Log triggers, the service account + should also have the ``roles/eventarc.eventReceiver`` IAM + role. destination (google.cloud.eventarc_v1.types.Destination): Required. Destination specifies where the events should be sent to. transport (google.cloud.eventarc_v1.types.Transport): - Optional. In order to deliver messages, - Eventarc may use other GCP products as transport + Optional. To deliver messages, Eventarc might + use other GCP products as a transport intermediary. This field contains a reference to that transport intermediary. This information can be used for debugging purposes. labels (MutableMapping[str, str]): Optional. User labels attached to the triggers that can be used to group resources. + channel (str): + Optional. The name of the channel associated with the + trigger in + ``projects/{project}/locations/{location}/channels/{channel}`` + format. You must provide a channel to receive events from + Eventarc SaaS partners. + conditions (MutableMapping[str, google.cloud.eventarc_v1.types.StateCondition]): + Output only. The reason(s) why a trigger is + in FAILED state. etag (str): Output only. This checksum is computed by the server based on the value of other fields, and - may be sent only on create requests to ensure - the client has an up-to-date value before + might be sent only on create requests to ensure + that the client has an up-to-date value before proceeding. """ @@ -138,6 +150,16 @@ class Trigger(proto.Message): proto.STRING, number=12, ) + channel: str = proto.Field( + proto.STRING, + number=13, + ) + conditions: MutableMapping[str, 'StateCondition'] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=15, + message='StateCondition', + ) etag: str = proto.Field( proto.STRING, number=99, @@ -153,10 +175,16 @@ class EventFilter(proto.Message): Required. The name of a CloudEvents attribute. Currently, only a subset of attributes are supported for filtering. + All triggers MUST provide a filter for the 'type' attribute. value (str): Required. The value for the attribute. + operator (str): + Optional. 
The operator used for matching the events with the + value of the filter. If not specified, only events that have + an exact key-value pair specified in the filter are matched. + The only allowed value is ``match-path-pattern``. """ attribute: str = proto.Field( @@ -167,18 +195,67 @@ class EventFilter(proto.Message): proto.STRING, number=2, ) + operator: str = proto.Field( + proto.STRING, + number=3, + ) + + +class StateCondition(proto.Message): + r"""A condition that is part of the trigger state computation. + + Attributes: + code (google.rpc.code_pb2.Code): + The canonical code of the condition. + message (str): + Human-readable message. + """ + + code: code_pb2.Code = proto.Field( + proto.ENUM, + number=1, + enum=code_pb2.Code, + ) + message: str = proto.Field( + proto.STRING, + number=2, + ) class Destination(proto.Message): r"""Represents a target of an invocation over HTTP. + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields Attributes: cloud_run (google.cloud.eventarc_v1.types.CloudRun): - Cloud Run fully-managed service that receives - the events. The service should be running in the - same project of the trigger. + Cloud Run fully-managed resource that + receives the events. The resource should be in + the same project as the trigger. + + This field is a member of `oneof`_ ``descriptor``. + cloud_function (str): + The Cloud Function resource name. Only Cloud Functions V2 is + supported. Format: + ``projects/{project}/locations/{location}/functions/{function}`` + + This field is a member of `oneof`_ ``descriptor``. + gke (google.cloud.eventarc_v1.types.GKE): + A GKE service capable of receiving events. + The service should be running in the same + project as the trigger. + + This field is a member of `oneof`_ ``descriptor``. + workflow (str): + The resource name of the Workflow whose Executions are + triggered by the events. The Workflow resource should be + deployed in the same project as the trigger. Format: + ``projects/{project}/locations/{location}/workflows/{workflow}`` This field is a member of `oneof`_ ``descriptor``. """ @@ -189,11 +266,27 @@ class Destination(proto.Message): oneof='descriptor', message='CloudRun', ) + cloud_function: str = proto.Field( + proto.STRING, + number=2, + oneof='descriptor', + ) + gke: 'GKE' = proto.Field( + proto.MESSAGE, + number=3, + oneof='descriptor', + message='GKE', + ) + workflow: str = proto.Field( + proto.STRING, + number=4, + oneof='descriptor', + ) class Transport(proto.Message): r"""Represents the transport intermediaries created for the - trigger in order to deliver events. + trigger to deliver events. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -201,7 +294,7 @@ class Transport(proto.Message): Attributes: pubsub (google.cloud.eventarc_v1.types.Pubsub): The Pub/Sub topic and subscription used by - Eventarc as delivery intermediary. + Eventarc as a transport intermediary. This field is a member of `oneof`_ ``intermediary``. """ @@ -222,15 +315,15 @@ class CloudRun(proto.Message): Required. The name of the Cloud Run service being addressed. See https://cloud.google.com/run/docs/reference/rest/v1/namespaces.services. 
- Only services located in the same project of the + + Only services located in the same project as the + trigger object can be addressed. path (str): Optional. The relative path on the Cloud Run service the events should be sent to. - - The value must conform to the definition of URI - path segment (section 3.3 of RFC2396). Examples: - "/route", "route", "route/subroute". + The value must conform to the definition of a + URI path segment (section 3.3 of RFC2396). + Examples: "/route", "route", "route/subroute". region (str): Required. The region the Cloud Run service is deployed in. @@ -250,22 +343,73 @@ ) +class GKE(proto.Message): + r"""Represents a GKE destination. + + Attributes: + cluster (str): + Required. The name of the cluster the GKE + service is running in. The cluster must be + running in the same project as the trigger being + created. + location (str): + Required. The name of the Google Compute + Engine zone or region in which the cluster + resides; this can be either a compute zone (for + example, us-central1-a) for zonal clusters or a + region (for example, us-central1) for regional + clusters. + namespace (str): + Required. The namespace the GKE service is + running in. + service (str): + Required. Name of the GKE service. + path (str): + Optional. The relative path on the GKE + service the events should be sent to. + The value must conform to the definition of a + URI path segment (section 3.3 of RFC2396). + Examples: "/route", "route", "route/subroute". + """ + + cluster: str = proto.Field( + proto.STRING, + number=1, + ) + location: str = proto.Field( + proto.STRING, + number=2, + ) + namespace: str = proto.Field( + proto.STRING, + number=3, + ) + service: str = proto.Field( + proto.STRING, + number=4, + ) + path: str = proto.Field( + proto.STRING, + number=5, + ) + + +class Pubsub(proto.Message): + r"""Represents a Pub/Sub transport. + + Attributes: + topic (str): + Optional. The name of the Pub/Sub topic created and managed + by Eventarc as a transport for the event delivery. Format: + ``projects/{PROJECT_ID}/topics/{TOPIC_NAME}``. + - You may set an existing topic for triggers of the type - ``google.cloud.pubsub.topic.v1.messagePublished`` only. The - topic you provide here will not be deleted by Eventarc at - trigger deletion. + You can set an existing topic for triggers of the type + ``google.cloud.pubsub.topic.v1.messagePublished``. The topic + you provide here is not deleted by Eventarc at trigger + deletion. subscription (str): Output only. The name of the Pub/Sub subscription created - and managed by Eventarc system as a transport for the event + and managed by Eventarc as a transport for the event delivery. Format: ``projects/{PROJECT_ID}/subscriptions/{SUBSCRIPTION_NAME}``.
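Destination above is a oneof: assigning one member clears whichever member was previously set. A minimal illustration with made-up values:

    from google.cloud import eventarc_v1

    destination = eventarc_v1.Destination()
    destination.cloud_run = eventarc_v1.CloudRun(
        service="my-service",
        region="us-central1",
    )
    destination.workflow = (
        "projects/my-project/locations/us-central1/workflows/my-workflow"
    )
    # Assigning workflow cleared cloud_run from the 'descriptor' oneof.
    assert "cloud_run" not in destination
    assert "workflow" in destination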
""" diff --git a/tests/integration/goldens/eventarc/noxfile.py b/tests/integration/goldens/eventarc/noxfile.py index 08024a9112..6d840f9658 100755 --- a/tests/integration/goldens/eventarc/noxfile.py +++ b/tests/integration/goldens/eventarc/noxfile.py @@ -134,7 +134,7 @@ def docs(session): """Build the docs for this library.""" session.install("-e", ".") - session.install("sphinx==4.0.1", "alabaster", "recommonmark") + session.install("sphinx==7.0.1", "alabaster", "recommonmark") shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) session.run( diff --git a/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_channel_async.py b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_channel_async.py new file mode 100755 index 0000000000..4dbb67d6ad --- /dev/null +++ b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_channel_async.py @@ -0,0 +1,63 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateChannel +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-eventarc + + +# [START eventarc_v1_generated_Eventarc_CreateChannel_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import eventarc_v1 + + +async def sample_create_channel(): + # Create a client + client = eventarc_v1.EventarcAsyncClient() + + # Initialize request argument(s) + channel = eventarc_v1.Channel() + channel.pubsub_topic = "pubsub_topic_value" + channel.name = "name_value" + + request = eventarc_v1.CreateChannelRequest( + parent="parent_value", + channel=channel, + channel_id="channel_id_value", + validate_only=True, + ) + + # Make the request + operation = client.create_channel(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END eventarc_v1_generated_Eventarc_CreateChannel_async] diff --git a/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_channel_connection_async.py b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_channel_connection_async.py new file mode 100755 index 0000000000..d740fe73e5 --- /dev/null +++ b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_channel_connection_async.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateChannelConnection +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-eventarc + + +# [START eventarc_v1_generated_Eventarc_CreateChannelConnection_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import eventarc_v1 + + +async def sample_create_channel_connection(): + # Create a client + client = eventarc_v1.EventarcAsyncClient() + + # Initialize request argument(s) + channel_connection = eventarc_v1.ChannelConnection() + channel_connection.name = "name_value" + channel_connection.channel = "channel_value" + + request = eventarc_v1.CreateChannelConnectionRequest( + parent="parent_value", + channel_connection=channel_connection, + channel_connection_id="channel_connection_id_value", + ) + + # Make the request + operation = client.create_channel_connection(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END eventarc_v1_generated_Eventarc_CreateChannelConnection_async] diff --git a/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_channel_connection_sync.py b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_channel_connection_sync.py new file mode 100755 index 0000000000..1743fe90e2 --- /dev/null +++ b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_channel_connection_sync.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateChannelConnection +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-eventarc + + +# [START eventarc_v1_generated_Eventarc_CreateChannelConnection_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import eventarc_v1 + + +def sample_create_channel_connection(): + # Create a client + client = eventarc_v1.EventarcClient() + + # Initialize request argument(s) + channel_connection = eventarc_v1.ChannelConnection() + channel_connection.name = "name_value" + channel_connection.channel = "channel_value" + + request = eventarc_v1.CreateChannelConnectionRequest( + parent="parent_value", + channel_connection=channel_connection, + channel_connection_id="channel_connection_id_value", + ) + + # Make the request + operation = client.create_channel_connection(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END eventarc_v1_generated_Eventarc_CreateChannelConnection_sync] diff --git a/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_channel_sync.py b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_channel_sync.py new file mode 100755 index 0000000000..899225b0e4 --- /dev/null +++ b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_channel_sync.py @@ -0,0 +1,63 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateChannel +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-eventarc + + +# [START eventarc_v1_generated_Eventarc_CreateChannel_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import eventarc_v1 + + +def sample_create_channel(): + # Create a client + client = eventarc_v1.EventarcClient() + + # Initialize request argument(s) + channel = eventarc_v1.Channel() + channel.pubsub_topic = "pubsub_topic_value" + channel.name = "name_value" + + request = eventarc_v1.CreateChannelRequest( + parent="parent_value", + channel=channel, + channel_id="channel_id_value", + validate_only=True, + ) + + # Make the request + operation = client.create_channel(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END eventarc_v1_generated_Eventarc_CreateChannel_sync] diff --git a/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_channel_async.py b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_channel_async.py new file mode 100755 index 0000000000..8cffd31dc1 --- /dev/null +++ b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_channel_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteChannel +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-eventarc + + +# [START eventarc_v1_generated_Eventarc_DeleteChannel_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
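On the synchronous side, `operation.result()` blocks the calling thread until the operation completes. `google.api_core.operation.Operation.result()` accepts an optional `timeout` in seconds and re-raises the operation's error as a `google.api_core.exceptions.GoogleAPICallError` subclass if it finished in failure, so production callers usually bound the wait and handle errors rather than printing the response as the samples do. A sketch under those assumptions (the 300-second timeout is arbitrary):

    from google.api_core import exceptions
    from google.cloud import eventarc_v1


    def create_channel_with_timeout():
        client = eventarc_v1.EventarcClient()

        request = eventarc_v1.CreateChannelRequest(
            parent="parent_value",
            channel=eventarc_v1.Channel(
                pubsub_topic="pubsub_topic_value",
                name="name_value",
            ),
            channel_id="channel_id_value",
        )

        operation = client.create_channel(request=request)
        try:
            # Bound the blocking wait; result() raises if the LRO failed.
            return operation.result(timeout=300)
        except exceptions.GoogleAPICallError as err:
            print(f"CreateChannel failed: {err}")
            raise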
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import eventarc_v1 + + +async def sample_delete_channel(): + # Create a client + client = eventarc_v1.EventarcAsyncClient() + + # Initialize request argument(s) + request = eventarc_v1.DeleteChannelRequest( + name="name_value", + validate_only=True, + ) + + # Make the request + operation = client.delete_channel(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END eventarc_v1_generated_Eventarc_DeleteChannel_async] diff --git a/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_channel_connection_async.py b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_channel_connection_async.py new file mode 100755 index 0000000000..4de62a3619 --- /dev/null +++ b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_channel_connection_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteChannelConnection +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-eventarc + + +# [START eventarc_v1_generated_Eventarc_DeleteChannelConnection_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import eventarc_v1 + + +async def sample_delete_channel_connection(): + # Create a client + client = eventarc_v1.EventarcAsyncClient() + + # Initialize request argument(s) + request = eventarc_v1.DeleteChannelConnectionRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_channel_connection(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END eventarc_v1_generated_Eventarc_DeleteChannelConnection_async] diff --git a/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_channel_connection_sync.py b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_channel_connection_sync.py new file mode 100755 index 0000000000..b549b4daf8 --- /dev/null +++ b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_channel_connection_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteChannelConnection +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-eventarc + + +# [START eventarc_v1_generated_Eventarc_DeleteChannelConnection_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import eventarc_v1 + + +def sample_delete_channel_connection(): + # Create a client + client = eventarc_v1.EventarcClient() + + # Initialize request argument(s) + request = eventarc_v1.DeleteChannelConnectionRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_channel_connection(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END eventarc_v1_generated_Eventarc_DeleteChannelConnection_sync] diff --git a/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_channel_sync.py b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_channel_sync.py new file mode 100755 index 0000000000..d640d98726 --- /dev/null +++ b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_channel_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteChannel +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-eventarc + + +# [START eventarc_v1_generated_Eventarc_DeleteChannel_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import eventarc_v1 + + +def sample_delete_channel(): + # Create a client + client = eventarc_v1.EventarcClient() + + # Initialize request argument(s) + request = eventarc_v1.DeleteChannelRequest( + name="name_value", + validate_only=True, + ) + + # Make the request + operation = client.delete_channel(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END eventarc_v1_generated_Eventarc_DeleteChannel_sync] diff --git a/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_channel_async.py b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_channel_async.py new file mode 100755 index 0000000000..482e80511e --- /dev/null +++ b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_channel_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetChannel +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-eventarc + + +# [START eventarc_v1_generated_Eventarc_GetChannel_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
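Several of the generated requests above set `validate_only=True`. In the Eventarc API this flag asks the server to validate the request without actually performing the create, update, or delete, so these samples run as dry runs until the flag is dropped; the precise semantics are defined by the service, but the dry-run reading matches the field's documented intent. A dry-run-then-apply sketch (the channel resource-name format shown in the comment is the usual Eventarc pattern, not verified here):

    from google.cloud import eventarc_v1


    def delete_channel(name: str, dry_run: bool = True):
        client = eventarc_v1.EventarcClient()

        # With validate_only=True the service only checks the request;
        # pass dry_run=False to actually delete the channel, e.g. with
        # name="projects/{project}/locations/{location}/channels/{channel}".
        request = eventarc_v1.DeleteChannelRequest(
            name=name,
            validate_only=dry_run,
        )
        operation = client.delete_channel(request=request)
        return operation.result()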
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import eventarc_v1 + + +async def sample_get_channel(): + # Create a client + client = eventarc_v1.EventarcAsyncClient() + + # Initialize request argument(s) + request = eventarc_v1.GetChannelRequest( + name="name_value", + ) + + # Make the request + response = await client.get_channel(request=request) + + # Handle the response + print(response) + +# [END eventarc_v1_generated_Eventarc_GetChannel_async] diff --git a/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_channel_connection_async.py b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_channel_connection_async.py new file mode 100755 index 0000000000..e3300768b6 --- /dev/null +++ b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_channel_connection_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetChannelConnection +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-eventarc + + +# [START eventarc_v1_generated_Eventarc_GetChannelConnection_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import eventarc_v1 + + +async def sample_get_channel_connection(): + # Create a client + client = eventarc_v1.EventarcAsyncClient() + + # Initialize request argument(s) + request = eventarc_v1.GetChannelConnectionRequest( + name="name_value", + ) + + # Make the request + response = await client.get_channel_connection(request=request) + + # Handle the response + print(response) + +# [END eventarc_v1_generated_Eventarc_GetChannelConnection_async] diff --git a/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_channel_connection_sync.py b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_channel_connection_sync.py new file mode 100755 index 0000000000..23fe5839bd --- /dev/null +++ b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_channel_connection_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetChannelConnection +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-eventarc + + +# [START eventarc_v1_generated_Eventarc_GetChannelConnection_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import eventarc_v1 + + +def sample_get_channel_connection(): + # Create a client + client = eventarc_v1.EventarcClient() + + # Initialize request argument(s) + request = eventarc_v1.GetChannelConnectionRequest( + name="name_value", + ) + + # Make the request + response = client.get_channel_connection(request=request) + + # Handle the response + print(response) + +# [END eventarc_v1_generated_Eventarc_GetChannelConnection_sync] diff --git a/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_channel_sync.py b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_channel_sync.py new file mode 100755 index 0000000000..f56a7c44e5 --- /dev/null +++ b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_channel_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetChannel +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-eventarc + + +# [START eventarc_v1_generated_Eventarc_GetChannel_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import eventarc_v1 + + +def sample_get_channel(): + # Create a client + client = eventarc_v1.EventarcClient() + + # Initialize request argument(s) + request = eventarc_v1.GetChannelRequest( + name="name_value", + ) + + # Make the request + response = client.get_channel(request=request) + + # Handle the response + print(response) + +# [END eventarc_v1_generated_Eventarc_GetChannel_sync] diff --git a/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_google_channel_config_async.py b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_google_channel_config_async.py new file mode 100755 index 0000000000..fd03d024cc --- /dev/null +++ b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_google_channel_config_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
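The header comments in every sample point at client_options for regional endpoints. For any generated client, an endpoint override can be passed either as a `google.api_core.client_options.ClientOptions` instance or as a plain dict with an `api_endpoint` key; the hostname below is a placeholder, not a verified Eventarc regional endpoint:

    from google.api_core.client_options import ClientOptions
    from google.cloud import eventarc_v1

    # Placeholder endpoint -- substitute whatever hostname your
    # deployment actually requires.
    options = ClientOptions(api_endpoint="eventarc.googleapis.com")
    client = eventarc_v1.EventarcClient(client_options=options)

    # A dict with the same key works as well:
    client = eventarc_v1.EventarcClient(
        client_options={"api_endpoint": "eventarc.googleapis.com"}
    )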
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetGoogleChannelConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-eventarc + + +# [START eventarc_v1_generated_Eventarc_GetGoogleChannelConfig_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import eventarc_v1 + + +async def sample_get_google_channel_config(): + # Create a client + client = eventarc_v1.EventarcAsyncClient() + + # Initialize request argument(s) + request = eventarc_v1.GetGoogleChannelConfigRequest( + name="name_value", + ) + + # Make the request + response = await client.get_google_channel_config(request=request) + + # Handle the response + print(response) + +# [END eventarc_v1_generated_Eventarc_GetGoogleChannelConfig_async] diff --git a/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_google_channel_config_sync.py b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_google_channel_config_sync.py new file mode 100755 index 0000000000..273ee21a7a --- /dev/null +++ b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_google_channel_config_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetGoogleChannelConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-eventarc + + +# [START eventarc_v1_generated_Eventarc_GetGoogleChannelConfig_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import eventarc_v1 + + +def sample_get_google_channel_config(): + # Create a client + client = eventarc_v1.EventarcClient() + + # Initialize request argument(s) + request = eventarc_v1.GetGoogleChannelConfigRequest( + name="name_value", + ) + + # Make the request + response = client.get_google_channel_config(request=request) + + # Handle the response + print(response) + +# [END eventarc_v1_generated_Eventarc_GetGoogleChannelConfig_sync] diff --git a/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_provider_async.py b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_provider_async.py new file mode 100755 index 0000000000..477e2b4750 --- /dev/null +++ b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_provider_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetProvider +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-eventarc + + +# [START eventarc_v1_generated_Eventarc_GetProvider_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import eventarc_v1 + + +async def sample_get_provider(): + # Create a client + client = eventarc_v1.EventarcAsyncClient() + + # Initialize request argument(s) + request = eventarc_v1.GetProviderRequest( + name="name_value", + ) + + # Make the request + response = await client.get_provider(request=request) + + # Handle the response + print(response) + +# [END eventarc_v1_generated_Eventarc_GetProvider_async] diff --git a/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_provider_sync.py b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_provider_sync.py new file mode 100755 index 0000000000..121fe759f9 --- /dev/null +++ b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_provider_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetProvider +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-eventarc + + +# [START eventarc_v1_generated_Eventarc_GetProvider_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import eventarc_v1 + + +def sample_get_provider(): + # Create a client + client = eventarc_v1.EventarcClient() + + # Initialize request argument(s) + request = eventarc_v1.GetProviderRequest( + name="name_value", + ) + + # Make the request + response = client.get_provider(request=request) + + # Handle the response + print(response) + +# [END eventarc_v1_generated_Eventarc_GetProvider_sync] diff --git a/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_channel_connections_async.py b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_channel_connections_async.py new file mode 100755 index 0000000000..a9f93081bc --- /dev/null +++ b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_channel_connections_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListChannelConnections +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-eventarc + + +# [START eventarc_v1_generated_Eventarc_ListChannelConnections_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
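The unary Get* samples print the response directly; real callers usually want to distinguish a missing resource from other failures. `google.api_core.exceptions.NotFound` is raised for a NOT_FOUND status, and generated clients conventionally expose `*_path()` helpers for assembling resource names — `EventarcClient.provider_path` is assumed here to follow that convention rather than confirmed from this diff:

    from google.api_core import exceptions
    from google.cloud import eventarc_v1


    def get_provider_or_none(project: str, location: str, provider: str):
        client = eventarc_v1.EventarcClient()
        # Assumed generated helper producing
        # "projects/{project}/locations/{location}/providers/{provider}".
        name = client.provider_path(project, location, provider)
        try:
            # Generated methods also accept the request as a plain dict.
            return client.get_provider(request={"name": name})
        except exceptions.NotFound:
            return None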
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import eventarc_v1 + + +async def sample_list_channel_connections(): + # Create a client + client = eventarc_v1.EventarcAsyncClient() + + # Initialize request argument(s) + request = eventarc_v1.ListChannelConnectionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_channel_connections(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END eventarc_v1_generated_Eventarc_ListChannelConnections_async] diff --git a/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_channel_connections_sync.py b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_channel_connections_sync.py new file mode 100755 index 0000000000..0f6bbfb23c --- /dev/null +++ b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_channel_connections_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListChannelConnections +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-eventarc + + +# [START eventarc_v1_generated_Eventarc_ListChannelConnections_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import eventarc_v1 + + +def sample_list_channel_connections(): + # Create a client + client = eventarc_v1.EventarcClient() + + # Initialize request argument(s) + request = eventarc_v1.ListChannelConnectionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_channel_connections(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END eventarc_v1_generated_Eventarc_ListChannelConnections_sync] diff --git a/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_channels_async.py b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_channels_async.py new file mode 100755 index 0000000000..4cfb527880 --- /dev/null +++ b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_channels_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListChannels +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-eventarc + + +# [START eventarc_v1_generated_Eventarc_ListChannels_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import eventarc_v1 + + +async def sample_list_channels(): + # Create a client + client = eventarc_v1.EventarcAsyncClient() + + # Initialize request argument(s) + request = eventarc_v1.ListChannelsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_channels(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END eventarc_v1_generated_Eventarc_ListChannels_async] diff --git a/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_channels_sync.py b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_channels_sync.py new file mode 100755 index 0000000000..6caa5f6e86 --- /dev/null +++ b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_channels_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListChannels +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-eventarc + + +# [START eventarc_v1_generated_Eventarc_ListChannels_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import eventarc_v1 + + +def sample_list_channels(): + # Create a client + client = eventarc_v1.EventarcClient() + + # Initialize request argument(s) + request = eventarc_v1.ListChannelsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_channels(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END eventarc_v1_generated_Eventarc_ListChannels_sync] diff --git a/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_providers_async.py b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_providers_async.py new file mode 100755 index 0000000000..af9e9b81bc --- /dev/null +++ b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_providers_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListProviders +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-eventarc + + +# [START eventarc_v1_generated_Eventarc_ListProviders_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
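The List* samples iterate items, and the pager they receive transparently fetches follow-up pages on demand. Iterating the pager yields individual resources across all pages, its `pages` attribute yields the raw per-page responses, and `page_size` on the request controls server-side page sizing — the standard AIP-158 behavior for generated pagers, with the async pager supporting `async for` the same way the samples above show. A sketch:

    from google.cloud import eventarc_v1


    def list_channels(parent: str):
        client = eventarc_v1.EventarcClient()
        request = eventarc_v1.ListChannelsRequest(
            parent=parent,
            page_size=50,  # the server may still return fewer per page
        )

        # Item-wise iteration fetches new pages as needed.
        for channel in client.list_channels(request=request):
            print(channel.name)

        # Or walk page by page to inspect the raw responses.
        for page in client.list_channels(request=request).pages:
            print(len(page.channels), "channels on this page")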
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import eventarc_v1 + + +async def sample_list_providers(): + # Create a client + client = eventarc_v1.EventarcAsyncClient() + + # Initialize request argument(s) + request = eventarc_v1.ListProvidersRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_providers(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END eventarc_v1_generated_Eventarc_ListProviders_async] diff --git a/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_providers_sync.py b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_providers_sync.py new file mode 100755 index 0000000000..6f24cdd557 --- /dev/null +++ b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_providers_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListProviders +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-eventarc + + +# [START eventarc_v1_generated_Eventarc_ListProviders_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import eventarc_v1 + + +def sample_list_providers(): + # Create a client + client = eventarc_v1.EventarcClient() + + # Initialize request argument(s) + request = eventarc_v1.ListProvidersRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_providers(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END eventarc_v1_generated_Eventarc_ListProviders_sync] diff --git a/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_channel_async.py b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_channel_async.py new file mode 100755 index 0000000000..83c308256a --- /dev/null +++ b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_channel_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateChannel +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-eventarc + + +# [START eventarc_v1_generated_Eventarc_UpdateChannel_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import eventarc_v1 + + +async def sample_update_channel(): + # Create a client + client = eventarc_v1.EventarcAsyncClient() + + # Initialize request argument(s) + request = eventarc_v1.UpdateChannelRequest( + validate_only=True, + ) + + # Make the request + operation = client.update_channel(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END eventarc_v1_generated_Eventarc_UpdateChannel_async] diff --git a/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_channel_sync.py b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_channel_sync.py new file mode 100755 index 0000000000..9f2001c813 --- /dev/null +++ b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_channel_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateChannel +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-eventarc + + +# [START eventarc_v1_generated_Eventarc_UpdateChannel_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import eventarc_v1 + + +def sample_update_channel(): + # Create a client + client = eventarc_v1.EventarcClient() + + # Initialize request argument(s) + request = eventarc_v1.UpdateChannelRequest( + validate_only=True, + ) + + # Make the request + operation = client.update_channel(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END eventarc_v1_generated_Eventarc_UpdateChannel_sync] diff --git a/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_google_channel_config_async.py b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_google_channel_config_async.py new file mode 100755 index 0000000000..59d955d631 --- /dev/null +++ b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_google_channel_config_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateGoogleChannelConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-eventarc + + +# [START eventarc_v1_generated_Eventarc_UpdateGoogleChannelConfig_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
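The UpdateChannel samples send only `validate_only`; a real update carries the new `channel` state plus an `update_mask` naming the fields to change, so the server leaves everything else untouched. The sketch below assumes `UpdateChannelRequest` follows the usual AIP-134 shape with `channel` and `update_mask` fields:

    from google.protobuf import field_mask_pb2

    from google.cloud import eventarc_v1


    def update_channel_topic(name: str, topic: str):
        client = eventarc_v1.EventarcClient()

        channel = eventarc_v1.Channel(name=name, pubsub_topic=topic)
        request = eventarc_v1.UpdateChannelRequest(
            channel=channel,
            # Only the fields named in the mask are written.
            update_mask=field_mask_pb2.FieldMask(paths=["pubsub_topic"]),
        )
        return client.update_channel(request=request).result()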
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import eventarc_v1 + + +async def sample_update_google_channel_config(): + # Create a client + client = eventarc_v1.EventarcAsyncClient() + + # Initialize request argument(s) + google_channel_config = eventarc_v1.GoogleChannelConfig() + google_channel_config.name = "name_value" + + request = eventarc_v1.UpdateGoogleChannelConfigRequest( + google_channel_config=google_channel_config, + ) + + # Make the request + response = await client.update_google_channel_config(request=request) + + # Handle the response + print(response) + +# [END eventarc_v1_generated_Eventarc_UpdateGoogleChannelConfig_async] diff --git a/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_google_channel_config_sync.py b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_google_channel_config_sync.py new file mode 100755 index 0000000000..d39449bc25 --- /dev/null +++ b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_google_channel_config_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateGoogleChannelConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-eventarc + + +# [START eventarc_v1_generated_Eventarc_UpdateGoogleChannelConfig_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import eventarc_v1 + + +def sample_update_google_channel_config(): + # Create a client + client = eventarc_v1.EventarcClient() + + # Initialize request argument(s) + google_channel_config = eventarc_v1.GoogleChannelConfig() + google_channel_config.name = "name_value" + + request = eventarc_v1.UpdateGoogleChannelConfigRequest( + google_channel_config=google_channel_config, + ) + + # Make the request + response = client.update_google_channel_config(request=request) + + # Handle the response + print(response) + +# [END eventarc_v1_generated_Eventarc_UpdateGoogleChannelConfig_sync] diff --git a/tests/integration/goldens/eventarc/samples/generated_samples/snippet_metadata_google.cloud.eventarc.v1.json b/tests/integration/goldens/eventarc/samples/generated_samples/snippet_metadata_google.cloud.eventarc.v1.json index d26627397b..596aaecb1b 100755 --- a/tests/integration/goldens/eventarc/samples/generated_samples/snippet_metadata_google.cloud.eventarc.v1.json +++ b/tests/integration/goldens/eventarc/samples/generated_samples/snippet_metadata_google.cloud.eventarc.v1.json @@ -19,30 +19,30 @@ "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient", "shortName": "EventarcAsyncClient" }, - "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient.create_trigger", + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient.create_channel_connection", "method": { - "fullName": "google.cloud.eventarc.v1.Eventarc.CreateTrigger", + "fullName": "google.cloud.eventarc.v1.Eventarc.CreateChannelConnection", "service": { "fullName": "google.cloud.eventarc.v1.Eventarc", "shortName": "Eventarc" }, - "shortName": "CreateTrigger" + "shortName": "CreateChannelConnection" }, "parameters": [ { "name": "request", - "type": "google.cloud.eventarc_v1.types.CreateTriggerRequest" + "type": "google.cloud.eventarc_v1.types.CreateChannelConnectionRequest" }, { "name": "parent", "type": "str" }, { - "name": "trigger", - "type": "google.cloud.eventarc_v1.types.Trigger" + "name": "channel_connection", + "type": "google.cloud.eventarc_v1.types.ChannelConnection" }, { - "name": "trigger_id", + "name": "channel_connection_id", "type": "str" }, { @@ -59,21 +59,21 @@ } ], "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "create_trigger" + "shortName": "create_channel_connection" }, - "description": "Sample for CreateTrigger", - "file": "eventarc_v1_generated_eventarc_create_trigger_async.py", + "description": "Sample for CreateChannelConnection", + "file": "eventarc_v1_generated_eventarc_create_channel_connection_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "eventarc_v1_generated_Eventarc_CreateTrigger_async", + "regionTag": "eventarc_v1_generated_Eventarc_CreateChannelConnection_async", "segments": [ { - "end": 65, + "end": 61, "start": 27, "type": "FULL" }, { - "end": 65, + "end": 61, "start": 27, "type": "SHORT" }, @@ -83,22 +83,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 55, + "end": 51, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 62, - "start": 56, + "end": 58, + "start": 52, "type": "REQUEST_EXECUTION" }, { - "end": 66, - "start": 63, + "end": 62, + "start": 59, "type": "RESPONSE_HANDLING" } ], - "title": "eventarc_v1_generated_eventarc_create_trigger_async.py" + "title": "eventarc_v1_generated_eventarc_create_channel_connection_async.py" }, { 
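The remainder of the diff updates snippet_metadata_google.cloud.eventarc.v1.json, where each snippet entry records its sample file, region tag, and `segments` — line ranges typed FULL, SHORT, CLIENT_INITIALIZATION, REQUEST_INITIALIZATION, REQUEST_EXECUTION, and RESPONSE_HANDLING. The start/end renumbering in the hunks below simply tracks the regenerated files. Tooling that consumes this metadata might read it roughly as follows (the `snippets` top-level key is inferred from the visible structure, not confirmed by this diff):

    import json

    with open("snippet_metadata_google.cloud.eventarc.v1.json") as f:
        meta = json.load(f)

    for snippet in meta["snippets"]:
        # Map segment type -> (start, end) line range within the sample file.
        segments = {
            s["type"]: (s.get("start"), s.get("end"))
            for s in snippet["segments"]
        }
        print(snippet["regionTag"], snippet["file"], segments.get("FULL"))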
"canonical": true, @@ -107,30 +107,30 @@ "fullName": "google.cloud.eventarc_v1.EventarcClient", "shortName": "EventarcClient" }, - "fullName": "google.cloud.eventarc_v1.EventarcClient.create_trigger", + "fullName": "google.cloud.eventarc_v1.EventarcClient.create_channel_connection", "method": { - "fullName": "google.cloud.eventarc.v1.Eventarc.CreateTrigger", + "fullName": "google.cloud.eventarc.v1.Eventarc.CreateChannelConnection", "service": { "fullName": "google.cloud.eventarc.v1.Eventarc", "shortName": "Eventarc" }, - "shortName": "CreateTrigger" + "shortName": "CreateChannelConnection" }, "parameters": [ { "name": "request", - "type": "google.cloud.eventarc_v1.types.CreateTriggerRequest" + "type": "google.cloud.eventarc_v1.types.CreateChannelConnectionRequest" }, { "name": "parent", "type": "str" }, { - "name": "trigger", - "type": "google.cloud.eventarc_v1.types.Trigger" + "name": "channel_connection", + "type": "google.cloud.eventarc_v1.types.ChannelConnection" }, { - "name": "trigger_id", + "name": "channel_connection_id", "type": "str" }, { @@ -147,21 +147,21 @@ } ], "resultType": "google.api_core.operation.Operation", - "shortName": "create_trigger" + "shortName": "create_channel_connection" }, - "description": "Sample for CreateTrigger", - "file": "eventarc_v1_generated_eventarc_create_trigger_sync.py", + "description": "Sample for CreateChannelConnection", + "file": "eventarc_v1_generated_eventarc_create_channel_connection_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "eventarc_v1_generated_Eventarc_CreateTrigger_sync", + "regionTag": "eventarc_v1_generated_Eventarc_CreateChannelConnection_sync", "segments": [ { - "end": 65, + "end": 61, "start": 27, "type": "FULL" }, { - "end": 65, + "end": 61, "start": 27, "type": "SHORT" }, @@ -171,22 +171,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 55, + "end": 51, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 62, - "start": 56, + "end": 58, + "start": 52, "type": "REQUEST_EXECUTION" }, { - "end": 66, - "start": 63, + "end": 62, + "start": 59, "type": "RESPONSE_HANDLING" } ], - "title": "eventarc_v1_generated_eventarc_create_trigger_sync.py" + "title": "eventarc_v1_generated_eventarc_create_channel_connection_sync.py" }, { "canonical": true, @@ -196,27 +196,31 @@ "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient", "shortName": "EventarcAsyncClient" }, - "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient.delete_trigger", + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient.create_channel", "method": { - "fullName": "google.cloud.eventarc.v1.Eventarc.DeleteTrigger", + "fullName": "google.cloud.eventarc.v1.Eventarc.CreateChannel", "service": { "fullName": "google.cloud.eventarc.v1.Eventarc", "shortName": "Eventarc" }, - "shortName": "DeleteTrigger" + "shortName": "CreateChannel" }, "parameters": [ { "name": "request", - "type": "google.cloud.eventarc_v1.types.DeleteTriggerRequest" + "type": "google.cloud.eventarc_v1.types.CreateChannelRequest" }, { - "name": "name", + "name": "parent", "type": "str" }, { - "name": "allow_missing", - "type": "bool" + "name": "channel", + "type": "google.cloud.eventarc_v1.types.Channel" + }, + { + "name": "channel_id", + "type": "str" }, { "name": "retry", @@ -232,21 +236,21 @@ } ], "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "delete_trigger" + "shortName": "create_channel" }, - "description": "Sample for DeleteTrigger", - "file": "eventarc_v1_generated_eventarc_delete_trigger_async.py", + "description": 
"Sample for CreateChannel", + "file": "eventarc_v1_generated_eventarc_create_channel_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "eventarc_v1_generated_Eventarc_DeleteTrigger_async", + "regionTag": "eventarc_v1_generated_Eventarc_CreateChannel_async", "segments": [ { - "end": 56, + "end": 62, "start": 27, "type": "FULL" }, { - "end": 56, + "end": 62, "start": 27, "type": "SHORT" }, @@ -256,22 +260,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 52, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 53, - "start": 47, + "end": 59, + "start": 53, "type": "REQUEST_EXECUTION" }, { - "end": 57, - "start": 54, + "end": 63, + "start": 60, "type": "RESPONSE_HANDLING" } ], - "title": "eventarc_v1_generated_eventarc_delete_trigger_async.py" + "title": "eventarc_v1_generated_eventarc_create_channel_async.py" }, { "canonical": true, @@ -280,27 +284,31 @@ "fullName": "google.cloud.eventarc_v1.EventarcClient", "shortName": "EventarcClient" }, - "fullName": "google.cloud.eventarc_v1.EventarcClient.delete_trigger", + "fullName": "google.cloud.eventarc_v1.EventarcClient.create_channel", "method": { - "fullName": "google.cloud.eventarc.v1.Eventarc.DeleteTrigger", + "fullName": "google.cloud.eventarc.v1.Eventarc.CreateChannel", "service": { "fullName": "google.cloud.eventarc.v1.Eventarc", "shortName": "Eventarc" }, - "shortName": "DeleteTrigger" + "shortName": "CreateChannel" }, "parameters": [ { "name": "request", - "type": "google.cloud.eventarc_v1.types.DeleteTriggerRequest" + "type": "google.cloud.eventarc_v1.types.CreateChannelRequest" }, { - "name": "name", + "name": "parent", "type": "str" }, { - "name": "allow_missing", - "type": "bool" + "name": "channel", + "type": "google.cloud.eventarc_v1.types.Channel" + }, + { + "name": "channel_id", + "type": "str" }, { "name": "retry", @@ -316,21 +324,21 @@ } ], "resultType": "google.api_core.operation.Operation", - "shortName": "delete_trigger" + "shortName": "create_channel" }, - "description": "Sample for DeleteTrigger", - "file": "eventarc_v1_generated_eventarc_delete_trigger_sync.py", + "description": "Sample for CreateChannel", + "file": "eventarc_v1_generated_eventarc_create_channel_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "eventarc_v1_generated_Eventarc_DeleteTrigger_sync", + "regionTag": "eventarc_v1_generated_Eventarc_CreateChannel_sync", "segments": [ { - "end": 56, + "end": 62, "start": 27, "type": "FULL" }, { - "end": 56, + "end": 62, "start": 27, "type": "SHORT" }, @@ -340,22 +348,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 52, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 53, - "start": 47, + "end": 59, + "start": 53, "type": "REQUEST_EXECUTION" }, { - "end": 57, - "start": 54, + "end": 63, + "start": 60, "type": "RESPONSE_HANDLING" } ], - "title": "eventarc_v1_generated_eventarc_delete_trigger_sync.py" + "title": "eventarc_v1_generated_eventarc_create_channel_sync.py" }, { "canonical": true, @@ -365,22 +373,30 @@ "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient", "shortName": "EventarcAsyncClient" }, - "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient.get_trigger", + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient.create_trigger", "method": { - "fullName": "google.cloud.eventarc.v1.Eventarc.GetTrigger", + "fullName": "google.cloud.eventarc.v1.Eventarc.CreateTrigger", "service": { "fullName": "google.cloud.eventarc.v1.Eventarc", "shortName": "Eventarc" }, - "shortName": "GetTrigger" + 
"shortName": "CreateTrigger" }, "parameters": [ { "name": "request", - "type": "google.cloud.eventarc_v1.types.GetTriggerRequest" + "type": "google.cloud.eventarc_v1.types.CreateTriggerRequest" }, { - "name": "name", + "name": "parent", + "type": "str" + }, + { + "name": "trigger", + "type": "google.cloud.eventarc_v1.types.Trigger" + }, + { + "name": "trigger_id", "type": "str" }, { @@ -396,22 +412,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.eventarc_v1.types.Trigger", - "shortName": "get_trigger" + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_trigger" }, - "description": "Sample for GetTrigger", - "file": "eventarc_v1_generated_eventarc_get_trigger_async.py", + "description": "Sample for CreateTrigger", + "file": "eventarc_v1_generated_eventarc_create_trigger_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "eventarc_v1_generated_Eventarc_GetTrigger_async", + "regionTag": "eventarc_v1_generated_Eventarc_CreateTrigger_async", "segments": [ { - "end": 51, + "end": 65, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 65, "start": 27, "type": "SHORT" }, @@ -421,22 +437,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 55, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 62, + "start": 56, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 66, + "start": 63, "type": "RESPONSE_HANDLING" } ], - "title": "eventarc_v1_generated_eventarc_get_trigger_async.py" + "title": "eventarc_v1_generated_eventarc_create_trigger_async.py" }, { "canonical": true, @@ -445,22 +461,30 @@ "fullName": "google.cloud.eventarc_v1.EventarcClient", "shortName": "EventarcClient" }, - "fullName": "google.cloud.eventarc_v1.EventarcClient.get_trigger", + "fullName": "google.cloud.eventarc_v1.EventarcClient.create_trigger", "method": { - "fullName": "google.cloud.eventarc.v1.Eventarc.GetTrigger", + "fullName": "google.cloud.eventarc.v1.Eventarc.CreateTrigger", "service": { "fullName": "google.cloud.eventarc.v1.Eventarc", "shortName": "Eventarc" }, - "shortName": "GetTrigger" + "shortName": "CreateTrigger" }, "parameters": [ { "name": "request", - "type": "google.cloud.eventarc_v1.types.GetTriggerRequest" + "type": "google.cloud.eventarc_v1.types.CreateTriggerRequest" }, { - "name": "name", + "name": "parent", + "type": "str" + }, + { + "name": "trigger", + "type": "google.cloud.eventarc_v1.types.Trigger" + }, + { + "name": "trigger_id", "type": "str" }, { @@ -476,22 +500,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.eventarc_v1.types.Trigger", - "shortName": "get_trigger" + "resultType": "google.api_core.operation.Operation", + "shortName": "create_trigger" }, - "description": "Sample for GetTrigger", - "file": "eventarc_v1_generated_eventarc_get_trigger_sync.py", + "description": "Sample for CreateTrigger", + "file": "eventarc_v1_generated_eventarc_create_trigger_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "eventarc_v1_generated_Eventarc_GetTrigger_sync", + "regionTag": "eventarc_v1_generated_Eventarc_CreateTrigger_sync", "segments": [ { - "end": 51, + "end": 65, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 65, "start": 27, "type": "SHORT" }, @@ -501,22 +525,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 55, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 62, + "start": 56, "type": "REQUEST_EXECUTION" }, { - "end": 52, - 
"start": 49, + "end": 66, + "start": 63, "type": "RESPONSE_HANDLING" } ], - "title": "eventarc_v1_generated_eventarc_get_trigger_sync.py" + "title": "eventarc_v1_generated_eventarc_create_trigger_sync.py" }, { "canonical": true, @@ -526,22 +550,22 @@ "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient", "shortName": "EventarcAsyncClient" }, - "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient.list_triggers", + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient.delete_channel_connection", "method": { - "fullName": "google.cloud.eventarc.v1.Eventarc.ListTriggers", + "fullName": "google.cloud.eventarc.v1.Eventarc.DeleteChannelConnection", "service": { "fullName": "google.cloud.eventarc.v1.Eventarc", "shortName": "Eventarc" }, - "shortName": "ListTriggers" + "shortName": "DeleteChannelConnection" }, "parameters": [ { "name": "request", - "type": "google.cloud.eventarc_v1.types.ListTriggersRequest" + "type": "google.cloud.eventarc_v1.types.DeleteChannelConnectionRequest" }, { - "name": "parent", + "name": "name", "type": "str" }, { @@ -557,22 +581,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.eventarc_v1.services.eventarc.pagers.ListTriggersAsyncPager", - "shortName": "list_triggers" + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_channel_connection" }, - "description": "Sample for ListTriggers", - "file": "eventarc_v1_generated_eventarc_list_triggers_async.py", + "description": "Sample for DeleteChannelConnection", + "file": "eventarc_v1_generated_eventarc_delete_channel_connection_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "eventarc_v1_generated_Eventarc_ListTriggers_async", + "regionTag": "eventarc_v1_generated_Eventarc_DeleteChannelConnection_async", "segments": [ { - "end": 52, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 55, "start": 27, "type": "SHORT" }, @@ -587,17 +611,17 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 48, + "end": 52, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "eventarc_v1_generated_eventarc_list_triggers_async.py" + "title": "eventarc_v1_generated_eventarc_delete_channel_connection_async.py" }, { "canonical": true, @@ -606,22 +630,22 @@ "fullName": "google.cloud.eventarc_v1.EventarcClient", "shortName": "EventarcClient" }, - "fullName": "google.cloud.eventarc_v1.EventarcClient.list_triggers", + "fullName": "google.cloud.eventarc_v1.EventarcClient.delete_channel_connection", "method": { - "fullName": "google.cloud.eventarc.v1.Eventarc.ListTriggers", + "fullName": "google.cloud.eventarc.v1.Eventarc.DeleteChannelConnection", "service": { "fullName": "google.cloud.eventarc.v1.Eventarc", "shortName": "Eventarc" }, - "shortName": "ListTriggers" + "shortName": "DeleteChannelConnection" }, "parameters": [ { "name": "request", - "type": "google.cloud.eventarc_v1.types.ListTriggersRequest" + "type": "google.cloud.eventarc_v1.types.DeleteChannelConnectionRequest" }, { - "name": "parent", + "name": "name", "type": "str" }, { @@ -637,22 +661,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.eventarc_v1.services.eventarc.pagers.ListTriggersPager", - "shortName": "list_triggers" + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_channel_connection" }, - "description": "Sample for ListTriggers", - "file": "eventarc_v1_generated_eventarc_list_triggers_sync.py", + "description": 
"Sample for DeleteChannelConnection", + "file": "eventarc_v1_generated_eventarc_delete_channel_connection_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "eventarc_v1_generated_Eventarc_ListTriggers_sync", + "regionTag": "eventarc_v1_generated_Eventarc_DeleteChannelConnection_sync", "segments": [ { - "end": 52, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 55, "start": 27, "type": "SHORT" }, @@ -667,17 +691,2134 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 48, + "end": 52, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "eventarc_v1_generated_eventarc_list_triggers_sync.py" + "title": "eventarc_v1_generated_eventarc_delete_channel_connection_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient", + "shortName": "EventarcAsyncClient" + }, + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient.delete_channel", + "method": { + "fullName": "google.cloud.eventarc.v1.Eventarc.DeleteChannel", + "service": { + "fullName": "google.cloud.eventarc.v1.Eventarc", + "shortName": "Eventarc" + }, + "shortName": "DeleteChannel" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.eventarc_v1.types.DeleteChannelRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_channel" + }, + "description": "Sample for DeleteChannel", + "file": "eventarc_v1_generated_eventarc_delete_channel_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "eventarc_v1_generated_Eventarc_DeleteChannel_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "eventarc_v1_generated_eventarc_delete_channel_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.eventarc_v1.EventarcClient", + "shortName": "EventarcClient" + }, + "fullName": "google.cloud.eventarc_v1.EventarcClient.delete_channel", + "method": { + "fullName": "google.cloud.eventarc.v1.Eventarc.DeleteChannel", + "service": { + "fullName": "google.cloud.eventarc.v1.Eventarc", + "shortName": "Eventarc" + }, + "shortName": "DeleteChannel" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.eventarc_v1.types.DeleteChannelRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_channel" + }, + "description": "Sample for DeleteChannel", + "file": "eventarc_v1_generated_eventarc_delete_channel_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "eventarc_v1_generated_Eventarc_DeleteChannel_sync", + 
"segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "eventarc_v1_generated_eventarc_delete_channel_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient", + "shortName": "EventarcAsyncClient" + }, + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient.delete_trigger", + "method": { + "fullName": "google.cloud.eventarc.v1.Eventarc.DeleteTrigger", + "service": { + "fullName": "google.cloud.eventarc.v1.Eventarc", + "shortName": "Eventarc" + }, + "shortName": "DeleteTrigger" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.eventarc_v1.types.DeleteTriggerRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "allow_missing", + "type": "bool" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_trigger" + }, + "description": "Sample for DeleteTrigger", + "file": "eventarc_v1_generated_eventarc_delete_trigger_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "eventarc_v1_generated_Eventarc_DeleteTrigger_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "eventarc_v1_generated_eventarc_delete_trigger_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.eventarc_v1.EventarcClient", + "shortName": "EventarcClient" + }, + "fullName": "google.cloud.eventarc_v1.EventarcClient.delete_trigger", + "method": { + "fullName": "google.cloud.eventarc.v1.Eventarc.DeleteTrigger", + "service": { + "fullName": "google.cloud.eventarc.v1.Eventarc", + "shortName": "Eventarc" + }, + "shortName": "DeleteTrigger" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.eventarc_v1.types.DeleteTriggerRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "allow_missing", + "type": "bool" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_trigger" + }, + "description": "Sample for DeleteTrigger", + "file": "eventarc_v1_generated_eventarc_delete_trigger_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "eventarc_v1_generated_Eventarc_DeleteTrigger_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + 
"type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "eventarc_v1_generated_eventarc_delete_trigger_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient", + "shortName": "EventarcAsyncClient" + }, + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient.get_channel_connection", + "method": { + "fullName": "google.cloud.eventarc.v1.Eventarc.GetChannelConnection", + "service": { + "fullName": "google.cloud.eventarc.v1.Eventarc", + "shortName": "Eventarc" + }, + "shortName": "GetChannelConnection" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.eventarc_v1.types.GetChannelConnectionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.eventarc_v1.types.ChannelConnection", + "shortName": "get_channel_connection" + }, + "description": "Sample for GetChannelConnection", + "file": "eventarc_v1_generated_eventarc_get_channel_connection_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "eventarc_v1_generated_Eventarc_GetChannelConnection_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "eventarc_v1_generated_eventarc_get_channel_connection_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.eventarc_v1.EventarcClient", + "shortName": "EventarcClient" + }, + "fullName": "google.cloud.eventarc_v1.EventarcClient.get_channel_connection", + "method": { + "fullName": "google.cloud.eventarc.v1.Eventarc.GetChannelConnection", + "service": { + "fullName": "google.cloud.eventarc.v1.Eventarc", + "shortName": "Eventarc" + }, + "shortName": "GetChannelConnection" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.eventarc_v1.types.GetChannelConnectionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.eventarc_v1.types.ChannelConnection", + "shortName": "get_channel_connection" + }, + "description": "Sample for GetChannelConnection", + "file": "eventarc_v1_generated_eventarc_get_channel_connection_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "eventarc_v1_generated_Eventarc_GetChannelConnection_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"eventarc_v1_generated_eventarc_get_channel_connection_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient", + "shortName": "EventarcAsyncClient" + }, + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient.get_channel", + "method": { + "fullName": "google.cloud.eventarc.v1.Eventarc.GetChannel", + "service": { + "fullName": "google.cloud.eventarc.v1.Eventarc", + "shortName": "Eventarc" + }, + "shortName": "GetChannel" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.eventarc_v1.types.GetChannelRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.eventarc_v1.types.Channel", + "shortName": "get_channel" + }, + "description": "Sample for GetChannel", + "file": "eventarc_v1_generated_eventarc_get_channel_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "eventarc_v1_generated_Eventarc_GetChannel_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "eventarc_v1_generated_eventarc_get_channel_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.eventarc_v1.EventarcClient", + "shortName": "EventarcClient" + }, + "fullName": "google.cloud.eventarc_v1.EventarcClient.get_channel", + "method": { + "fullName": "google.cloud.eventarc.v1.Eventarc.GetChannel", + "service": { + "fullName": "google.cloud.eventarc.v1.Eventarc", + "shortName": "Eventarc" + }, + "shortName": "GetChannel" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.eventarc_v1.types.GetChannelRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.eventarc_v1.types.Channel", + "shortName": "get_channel" + }, + "description": "Sample for GetChannel", + "file": "eventarc_v1_generated_eventarc_get_channel_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "eventarc_v1_generated_Eventarc_GetChannel_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "eventarc_v1_generated_eventarc_get_channel_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient", + "shortName": "EventarcAsyncClient" + }, + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient.get_google_channel_config", + "method": { + "fullName": 
"google.cloud.eventarc.v1.Eventarc.GetGoogleChannelConfig", + "service": { + "fullName": "google.cloud.eventarc.v1.Eventarc", + "shortName": "Eventarc" + }, + "shortName": "GetGoogleChannelConfig" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.eventarc_v1.types.GetGoogleChannelConfigRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.eventarc_v1.types.GoogleChannelConfig", + "shortName": "get_google_channel_config" + }, + "description": "Sample for GetGoogleChannelConfig", + "file": "eventarc_v1_generated_eventarc_get_google_channel_config_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "eventarc_v1_generated_Eventarc_GetGoogleChannelConfig_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "eventarc_v1_generated_eventarc_get_google_channel_config_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.eventarc_v1.EventarcClient", + "shortName": "EventarcClient" + }, + "fullName": "google.cloud.eventarc_v1.EventarcClient.get_google_channel_config", + "method": { + "fullName": "google.cloud.eventarc.v1.Eventarc.GetGoogleChannelConfig", + "service": { + "fullName": "google.cloud.eventarc.v1.Eventarc", + "shortName": "Eventarc" + }, + "shortName": "GetGoogleChannelConfig" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.eventarc_v1.types.GetGoogleChannelConfigRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.eventarc_v1.types.GoogleChannelConfig", + "shortName": "get_google_channel_config" + }, + "description": "Sample for GetGoogleChannelConfig", + "file": "eventarc_v1_generated_eventarc_get_google_channel_config_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "eventarc_v1_generated_Eventarc_GetGoogleChannelConfig_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "eventarc_v1_generated_eventarc_get_google_channel_config_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient", + "shortName": "EventarcAsyncClient" + }, + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient.get_provider", + "method": { + "fullName": "google.cloud.eventarc.v1.Eventarc.GetProvider", + "service": { + "fullName": "google.cloud.eventarc.v1.Eventarc", + "shortName": "Eventarc" + }, + 
"shortName": "GetProvider" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.eventarc_v1.types.GetProviderRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.eventarc_v1.types.Provider", + "shortName": "get_provider" + }, + "description": "Sample for GetProvider", + "file": "eventarc_v1_generated_eventarc_get_provider_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "eventarc_v1_generated_Eventarc_GetProvider_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "eventarc_v1_generated_eventarc_get_provider_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.eventarc_v1.EventarcClient", + "shortName": "EventarcClient" + }, + "fullName": "google.cloud.eventarc_v1.EventarcClient.get_provider", + "method": { + "fullName": "google.cloud.eventarc.v1.Eventarc.GetProvider", + "service": { + "fullName": "google.cloud.eventarc.v1.Eventarc", + "shortName": "Eventarc" + }, + "shortName": "GetProvider" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.eventarc_v1.types.GetProviderRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.eventarc_v1.types.Provider", + "shortName": "get_provider" + }, + "description": "Sample for GetProvider", + "file": "eventarc_v1_generated_eventarc_get_provider_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "eventarc_v1_generated_Eventarc_GetProvider_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "eventarc_v1_generated_eventarc_get_provider_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient", + "shortName": "EventarcAsyncClient" + }, + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient.get_trigger", + "method": { + "fullName": "google.cloud.eventarc.v1.Eventarc.GetTrigger", + "service": { + "fullName": "google.cloud.eventarc.v1.Eventarc", + "shortName": "Eventarc" + }, + "shortName": "GetTrigger" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.eventarc_v1.types.GetTriggerRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + 
"resultType": "google.cloud.eventarc_v1.types.Trigger", + "shortName": "get_trigger" + }, + "description": "Sample for GetTrigger", + "file": "eventarc_v1_generated_eventarc_get_trigger_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "eventarc_v1_generated_Eventarc_GetTrigger_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "eventarc_v1_generated_eventarc_get_trigger_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.eventarc_v1.EventarcClient", + "shortName": "EventarcClient" + }, + "fullName": "google.cloud.eventarc_v1.EventarcClient.get_trigger", + "method": { + "fullName": "google.cloud.eventarc.v1.Eventarc.GetTrigger", + "service": { + "fullName": "google.cloud.eventarc.v1.Eventarc", + "shortName": "Eventarc" + }, + "shortName": "GetTrigger" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.eventarc_v1.types.GetTriggerRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.eventarc_v1.types.Trigger", + "shortName": "get_trigger" + }, + "description": "Sample for GetTrigger", + "file": "eventarc_v1_generated_eventarc_get_trigger_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "eventarc_v1_generated_Eventarc_GetTrigger_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "eventarc_v1_generated_eventarc_get_trigger_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient", + "shortName": "EventarcAsyncClient" + }, + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient.list_channel_connections", + "method": { + "fullName": "google.cloud.eventarc.v1.Eventarc.ListChannelConnections", + "service": { + "fullName": "google.cloud.eventarc.v1.Eventarc", + "shortName": "Eventarc" + }, + "shortName": "ListChannelConnections" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.eventarc_v1.types.ListChannelConnectionsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.eventarc_v1.services.eventarc.pagers.ListChannelConnectionsAsyncPager", + "shortName": "list_channel_connections" + }, + "description": "Sample for ListChannelConnections", + "file": "eventarc_v1_generated_eventarc_list_channel_connections_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"eventarc_v1_generated_Eventarc_ListChannelConnections_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "eventarc_v1_generated_eventarc_list_channel_connections_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.eventarc_v1.EventarcClient", + "shortName": "EventarcClient" + }, + "fullName": "google.cloud.eventarc_v1.EventarcClient.list_channel_connections", + "method": { + "fullName": "google.cloud.eventarc.v1.Eventarc.ListChannelConnections", + "service": { + "fullName": "google.cloud.eventarc.v1.Eventarc", + "shortName": "Eventarc" + }, + "shortName": "ListChannelConnections" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.eventarc_v1.types.ListChannelConnectionsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.eventarc_v1.services.eventarc.pagers.ListChannelConnectionsPager", + "shortName": "list_channel_connections" + }, + "description": "Sample for ListChannelConnections", + "file": "eventarc_v1_generated_eventarc_list_channel_connections_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "eventarc_v1_generated_Eventarc_ListChannelConnections_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "eventarc_v1_generated_eventarc_list_channel_connections_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient", + "shortName": "EventarcAsyncClient" + }, + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient.list_channels", + "method": { + "fullName": "google.cloud.eventarc.v1.Eventarc.ListChannels", + "service": { + "fullName": "google.cloud.eventarc.v1.Eventarc", + "shortName": "Eventarc" + }, + "shortName": "ListChannels" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.eventarc_v1.types.ListChannelsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.eventarc_v1.services.eventarc.pagers.ListChannelsAsyncPager", + "shortName": "list_channels" + }, + "description": "Sample for ListChannels", + "file": "eventarc_v1_generated_eventarc_list_channels_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "eventarc_v1_generated_Eventarc_ListChannels_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": 
"SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "eventarc_v1_generated_eventarc_list_channels_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.eventarc_v1.EventarcClient", + "shortName": "EventarcClient" + }, + "fullName": "google.cloud.eventarc_v1.EventarcClient.list_channels", + "method": { + "fullName": "google.cloud.eventarc.v1.Eventarc.ListChannels", + "service": { + "fullName": "google.cloud.eventarc.v1.Eventarc", + "shortName": "Eventarc" + }, + "shortName": "ListChannels" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.eventarc_v1.types.ListChannelsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.eventarc_v1.services.eventarc.pagers.ListChannelsPager", + "shortName": "list_channels" + }, + "description": "Sample for ListChannels", + "file": "eventarc_v1_generated_eventarc_list_channels_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "eventarc_v1_generated_Eventarc_ListChannels_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "eventarc_v1_generated_eventarc_list_channels_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient", + "shortName": "EventarcAsyncClient" + }, + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient.list_providers", + "method": { + "fullName": "google.cloud.eventarc.v1.Eventarc.ListProviders", + "service": { + "fullName": "google.cloud.eventarc.v1.Eventarc", + "shortName": "Eventarc" + }, + "shortName": "ListProviders" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.eventarc_v1.types.ListProvidersRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.eventarc_v1.services.eventarc.pagers.ListProvidersAsyncPager", + "shortName": "list_providers" + }, + "description": "Sample for ListProviders", + "file": "eventarc_v1_generated_eventarc_list_providers_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "eventarc_v1_generated_Eventarc_ListProviders_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + 
} + ], + "title": "eventarc_v1_generated_eventarc_list_providers_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.eventarc_v1.EventarcClient", + "shortName": "EventarcClient" + }, + "fullName": "google.cloud.eventarc_v1.EventarcClient.list_providers", + "method": { + "fullName": "google.cloud.eventarc.v1.Eventarc.ListProviders", + "service": { + "fullName": "google.cloud.eventarc.v1.Eventarc", + "shortName": "Eventarc" + }, + "shortName": "ListProviders" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.eventarc_v1.types.ListProvidersRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.eventarc_v1.services.eventarc.pagers.ListProvidersPager", + "shortName": "list_providers" + }, + "description": "Sample for ListProviders", + "file": "eventarc_v1_generated_eventarc_list_providers_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "eventarc_v1_generated_Eventarc_ListProviders_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "eventarc_v1_generated_eventarc_list_providers_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient", + "shortName": "EventarcAsyncClient" + }, + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient.list_triggers", + "method": { + "fullName": "google.cloud.eventarc.v1.Eventarc.ListTriggers", + "service": { + "fullName": "google.cloud.eventarc.v1.Eventarc", + "shortName": "Eventarc" + }, + "shortName": "ListTriggers" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.eventarc_v1.types.ListTriggersRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.eventarc_v1.services.eventarc.pagers.ListTriggersAsyncPager", + "shortName": "list_triggers" + }, + "description": "Sample for ListTriggers", + "file": "eventarc_v1_generated_eventarc_list_triggers_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "eventarc_v1_generated_Eventarc_ListTriggers_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "eventarc_v1_generated_eventarc_list_triggers_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.eventarc_v1.EventarcClient", + "shortName": "EventarcClient" + }, + "fullName": 
"google.cloud.eventarc_v1.EventarcClient.list_triggers", + "method": { + "fullName": "google.cloud.eventarc.v1.Eventarc.ListTriggers", + "service": { + "fullName": "google.cloud.eventarc.v1.Eventarc", + "shortName": "Eventarc" + }, + "shortName": "ListTriggers" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.eventarc_v1.types.ListTriggersRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.eventarc_v1.services.eventarc.pagers.ListTriggersPager", + "shortName": "list_triggers" + }, + "description": "Sample for ListTriggers", + "file": "eventarc_v1_generated_eventarc_list_triggers_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "eventarc_v1_generated_Eventarc_ListTriggers_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "eventarc_v1_generated_eventarc_list_triggers_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient", + "shortName": "EventarcAsyncClient" + }, + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient.update_channel", + "method": { + "fullName": "google.cloud.eventarc.v1.Eventarc.UpdateChannel", + "service": { + "fullName": "google.cloud.eventarc.v1.Eventarc", + "shortName": "Eventarc" + }, + "shortName": "UpdateChannel" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.eventarc_v1.types.UpdateChannelRequest" + }, + { + "name": "channel", + "type": "google.cloud.eventarc_v1.types.Channel" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_channel" + }, + "description": "Sample for UpdateChannel", + "file": "eventarc_v1_generated_eventarc_update_channel_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "eventarc_v1_generated_Eventarc_UpdateChannel_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "eventarc_v1_generated_eventarc_update_channel_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.eventarc_v1.EventarcClient", + "shortName": "EventarcClient" + }, + "fullName": "google.cloud.eventarc_v1.EventarcClient.update_channel", + "method": { + "fullName": "google.cloud.eventarc.v1.Eventarc.UpdateChannel", + "service": { + "fullName": 
"google.cloud.eventarc.v1.Eventarc", + "shortName": "Eventarc" + }, + "shortName": "UpdateChannel" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.eventarc_v1.types.UpdateChannelRequest" + }, + { + "name": "channel", + "type": "google.cloud.eventarc_v1.types.Channel" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_channel" + }, + "description": "Sample for UpdateChannel", + "file": "eventarc_v1_generated_eventarc_update_channel_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "eventarc_v1_generated_Eventarc_UpdateChannel_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "eventarc_v1_generated_eventarc_update_channel_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient", + "shortName": "EventarcAsyncClient" + }, + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient.update_google_channel_config", + "method": { + "fullName": "google.cloud.eventarc.v1.Eventarc.UpdateGoogleChannelConfig", + "service": { + "fullName": "google.cloud.eventarc.v1.Eventarc", + "shortName": "Eventarc" + }, + "shortName": "UpdateGoogleChannelConfig" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.eventarc_v1.types.UpdateGoogleChannelConfigRequest" + }, + { + "name": "google_channel_config", + "type": "google.cloud.eventarc_v1.types.GoogleChannelConfig" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.eventarc_v1.types.GoogleChannelConfig", + "shortName": "update_google_channel_config" + }, + "description": "Sample for UpdateGoogleChannelConfig", + "file": "eventarc_v1_generated_eventarc_update_google_channel_config_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "eventarc_v1_generated_Eventarc_UpdateGoogleChannelConfig_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "eventarc_v1_generated_eventarc_update_google_channel_config_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.eventarc_v1.EventarcClient", + "shortName": "EventarcClient" + }, + "fullName": "google.cloud.eventarc_v1.EventarcClient.update_google_channel_config", + "method": { + "fullName": 
"google.cloud.eventarc.v1.Eventarc.UpdateGoogleChannelConfig", + "service": { + "fullName": "google.cloud.eventarc.v1.Eventarc", + "shortName": "Eventarc" + }, + "shortName": "UpdateGoogleChannelConfig" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.eventarc_v1.types.UpdateGoogleChannelConfigRequest" + }, + { + "name": "google_channel_config", + "type": "google.cloud.eventarc_v1.types.GoogleChannelConfig" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.eventarc_v1.types.GoogleChannelConfig", + "shortName": "update_google_channel_config" + }, + "description": "Sample for UpdateGoogleChannelConfig", + "file": "eventarc_v1_generated_eventarc_update_google_channel_config_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "eventarc_v1_generated_Eventarc_UpdateGoogleChannelConfig_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "eventarc_v1_generated_eventarc_update_google_channel_config_sync.py" }, { "canonical": true, diff --git a/tests/integration/goldens/eventarc/scripts/fixup_eventarc_v1_keywords.py b/tests/integration/goldens/eventarc/scripts/fixup_eventarc_v1_keywords.py index 2b6ac4b479..bdd67ffcdc 100755 --- a/tests/integration/goldens/eventarc/scripts/fixup_eventarc_v1_keywords.py +++ b/tests/integration/goldens/eventarc/scripts/fixup_eventarc_v1_keywords.py @@ -39,10 +39,23 @@ def partition( class eventarcCallTransformer(cst.CSTTransformer): CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'create_channel': ('parent', 'channel', 'channel_id', 'validate_only', ), + 'create_channel_connection': ('parent', 'channel_connection', 'channel_connection_id', ), 'create_trigger': ('parent', 'trigger', 'trigger_id', 'validate_only', ), + 'delete_channel': ('name', 'validate_only', ), + 'delete_channel_connection': ('name', ), 'delete_trigger': ('name', 'validate_only', 'etag', 'allow_missing', ), + 'get_channel': ('name', ), + 'get_channel_connection': ('name', ), + 'get_google_channel_config': ('name', ), + 'get_provider': ('name', ), 'get_trigger': ('name', ), - 'list_triggers': ('parent', 'page_size', 'page_token', 'order_by', ), + 'list_channel_connections': ('parent', 'page_size', 'page_token', ), + 'list_channels': ('parent', 'page_size', 'page_token', 'order_by', ), + 'list_providers': ('parent', 'page_size', 'page_token', 'order_by', 'filter', ), + 'list_triggers': ('parent', 'page_size', 'page_token', 'order_by', 'filter', ), + 'update_channel': ('validate_only', 'channel', 'update_mask', ), + 'update_google_channel_config': ('google_channel_config', 'update_mask', ), 'update_trigger': ('validate_only', 'trigger', 'update_mask', 'allow_missing', ), } diff --git a/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py b/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py index e87ed68bde..771528a988 
100755 --- a/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py +++ b/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py @@ -51,17 +51,25 @@ from google.cloud.eventarc_v1.services.eventarc import EventarcClient from google.cloud.eventarc_v1.services.eventarc import pagers from google.cloud.eventarc_v1.services.eventarc import transports +from google.cloud.eventarc_v1.types import channel +from google.cloud.eventarc_v1.types import channel as gce_channel +from google.cloud.eventarc_v1.types import channel_connection +from google.cloud.eventarc_v1.types import channel_connection as gce_channel_connection +from google.cloud.eventarc_v1.types import discovery from google.cloud.eventarc_v1.types import eventarc +from google.cloud.eventarc_v1.types import google_channel_config +from google.cloud.eventarc_v1.types import google_channel_config as gce_google_channel_config from google.cloud.eventarc_v1.types import trigger from google.cloud.eventarc_v1.types import trigger as gce_trigger from google.cloud.location import locations_pb2 from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import options_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import code_pb2 # type: ignore import google.auth @@ -575,6 +583,7 @@ def test_get_trigger(request_type, transport: str = 'grpc'): name='name_value', uid='uid_value', service_account='service_account_value', + channel='channel_value', etag='etag_value', ) response = client.get_trigger(request) @@ -589,6 +598,7 @@ def test_get_trigger(request_type, transport: str = 'grpc'): assert response.name == 'name_value' assert response.uid == 'uid_value' assert response.service_account == 'service_account_value' + assert response.channel == 'channel_value' assert response.etag == 'etag_value' @@ -629,6 +639,7 @@ async def test_get_trigger_async(transport: str = 'grpc_asyncio', request_type=e name='name_value', uid='uid_value', service_account='service_account_value', + channel='channel_value', etag='etag_value', )) response = await client.get_trigger(request) @@ -643,6 +654,7 @@ async def test_get_trigger_async(transport: str = 'grpc_asyncio', request_type=e assert response.name == 'name_value' assert response.uid == 'uid_value' assert response.service_account == 'service_account_value' + assert response.channel == 'channel_value' assert response.etag == 'etag_value' @@ -1965,1997 +1977,10836 @@ async def test_delete_trigger_flattened_error_async(): @pytest.mark.parametrize("request_type", [ - eventarc.GetTriggerRequest, - dict, + eventarc.GetChannelRequest, + dict, ]) -def test_get_trigger_rest(request_type): +def test_get_channel(request_type, transport: str = 'grpc'): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport=transport, ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/triggers/sample3'} - request = request_type(**request_init) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = trigger.Trigger( - name='name_value', - uid='uid_value', - service_account='service_account_value', - etag='etag_value', + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_channel), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = channel.Channel( + name='name_value', + uid='uid_value', + provider='provider_value', + state=channel.Channel.State.PENDING, + activation_token='activation_token_value', + crypto_key_name='crypto_key_name_value', + pubsub_topic='pubsub_topic_value', ) + response = client.get_channel(request) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = trigger.Trigger.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.get_trigger(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.GetChannelRequest() # Establish that the response is the type that we expect. - assert isinstance(response, trigger.Trigger) + assert isinstance(response, channel.Channel) assert response.name == 'name_value' assert response.uid == 'uid_value' - assert response.service_account == 'service_account_value' - assert response.etag == 'etag_value' + assert response.provider == 'provider_value' + assert response.state == channel.Channel.State.PENDING + assert response.activation_token == 'activation_token_value' + assert response.crypto_key_name == 'crypto_key_name_value' -def test_get_trigger_rest_required_fields(request_type=eventarc.GetTriggerRequest): - transport_class = transports.EventarcRestTransport +def test_get_channel_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_channel), + '__call__') as call: + client.get_channel() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.GetChannelRequest() - # verify fields with default values are dropped +@pytest.mark.asyncio +async def test_get_channel_async(transport: str = 'grpc_asyncio', request_type=eventarc.GetChannelRequest): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_trigger._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - # verify required fields with default values are now present + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_channel), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(channel.Channel( + name='name_value', + uid='uid_value', + provider='provider_value', + state=channel.Channel.State.PENDING, + activation_token='activation_token_value', + crypto_key_name='crypto_key_name_value', + )) + response = await client.get_channel(request) - jsonified_request["name"] = 'name_value' + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.GetChannelRequest() - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_trigger._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + # Establish that the response is the type that we expect. + assert isinstance(response, channel.Channel) + assert response.name == 'name_value' + assert response.uid == 'uid_value' + assert response.provider == 'provider_value' + assert response.state == channel.Channel.State.PENDING + assert response.activation_token == 'activation_token_value' + assert response.crypto_key_name == 'crypto_key_name_value' - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' +@pytest.mark.asyncio +async def test_get_channel_async_from_dict(): + await test_get_channel_async(request_type=dict) + + +def test_get_channel_field_headers(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = trigger.Trigger() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params.
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - pb_return_value = trigger.Trigger.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.get_trigger(request) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = eventarc.GetChannelRequest() - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params + request.name = 'name_value' + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_channel), + '__call__') as call: + call.return_value = channel.Channel() + client.get_channel(request) -def test_get_trigger_rest_unset_required_fields(): - transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request - unset_fields = transport.get_trigger._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_trigger_rest_interceptors(null_interceptor): - transport = transports.EventarcRestTransport( +@pytest.mark.asyncio +async def test_get_channel_field_headers_async(): + client = EventarcAsyncClient( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), - ) - client = EventarcClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.EventarcRestInterceptor, "post_get_trigger") as post, \ - mock.patch.object(transports.EventarcRestInterceptor, "pre_get_trigger") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = eventarc.GetTriggerRequest.pb(eventarc.GetTriggerRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } + ) - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = trigger.Trigger.to_json(trigger.Trigger()) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = eventarc.GetChannelRequest() - request = eventarc.GetTriggerRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = trigger.Trigger() + request.name = 'name_value' - client.get_trigger(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_channel), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(channel.Channel()) + await client.get_channel(request) - pre.assert_called_once() - post.assert_called_once() + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] -def test_get_trigger_rest_bad_request(transport: str = 'rest', request_type=eventarc.GetTriggerRequest): +def test_get_channel_flattened(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/triggers/sample3'} - request = request_type(**request_init) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_channel), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = channel.Channel() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_channel( + name='name_value', + ) - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_trigger(request) + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val -def test_get_trigger_rest_flattened(): +def test_get_channel_flattened_error(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = trigger.Trigger() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/triggers/sample3'} - - # get truthy value for each flattened field - mock_args = dict( + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_channel( + eventarc.GetChannelRequest(), name='name_value', ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = trigger.Trigger.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - client.get_trigger(**mock_args) +@pytest.mark.asyncio +async def test_get_channel_flattened_async(): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_channel), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(channel.Channel()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_channel( + name='name_value', + ) # Establish that the underlying call was made with the expected # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/triggers/*}" % client.transport._host, args[1]) - + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val -def test_get_trigger_rest_flattened_error(transport: str = 'rest'): - client = EventarcClient( +@pytest.mark.asyncio +async def test_get_channel_flattened_error_async(): + client = EventarcAsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_trigger( - eventarc.GetTriggerRequest(), + await client.get_channel( + eventarc.GetChannelRequest(), name='name_value', ) -def test_get_trigger_rest_error(): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - @pytest.mark.parametrize("request_type", [ - eventarc.ListTriggersRequest, - dict, + eventarc.ListChannelsRequest, + dict, ]) -def test_list_triggers_rest(request_type): +def test_list_channels(request_type, transport: str = 'grpc'): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport=transport, ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = eventarc.ListTriggersResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_channels), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = eventarc.ListChannelsResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], ) + response = client.list_channels(request) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = eventarc.ListTriggersResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.list_triggers(request) + # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.ListChannelsRequest() # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListTriggersPager) + assert isinstance(response, pagers.ListChannelsPager) assert response.next_page_token == 'next_page_token_value' assert response.unreachable == ['unreachable_value'] -def test_list_triggers_rest_required_fields(request_type=eventarc.ListTriggersRequest): - transport_class = transports.EventarcRestTransport +def test_list_channels_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_channels), + '__call__') as call: + client.list_channels() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.ListChannelsRequest() - # verify fields with default values are dropped +@pytest.mark.asyncio +async def test_list_channels_async(transport: str = 'grpc_asyncio', request_type=eventarc.ListChannelsRequest): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_triggers._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - # verify required fields with default values are now present + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_channels), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(eventarc.ListChannelsResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + )) + response = await client.list_channels(request) - jsonified_request["parent"] = 'parent_value' + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.ListChannelsRequest() - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_triggers._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("order_by", "page_size", "page_token", )) - jsonified_request.update(unset_fields) + # Establish that the response is the type that we expect.
+ assert isinstance(response, pagers.ListChannelsAsyncPager) + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable == ['unreachable_value'] - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' +@pytest.mark.asyncio +async def test_list_channels_async_from_dict(): + await test_list_channels_async(request_type=dict) + + +def test_list_channels_field_headers(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = eventarc.ListTriggersResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = eventarc.ListTriggersResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.list_triggers(request) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = eventarc.ListChannelsRequest() - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params + request.parent = 'parent_value' + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_channels), + '__call__') as call: + call.return_value = eventarc.ListChannelsResponse() + client.list_channels(request) -def test_list_triggers_rest_unset_required_fields(): - transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request - unset_fields = transport.list_triggers._get_unset_required_fields({}) - assert set(unset_fields) == (set(("orderBy", "pageSize", "pageToken", )) & set(("parent", ))) + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_triggers_rest_interceptors(null_interceptor): - transport = transports.EventarcRestTransport( +@pytest.mark.asyncio +async def test_list_channels_field_headers_async(): + client = EventarcAsyncClient( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), - ) - client = EventarcClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.EventarcRestInterceptor, "post_list_triggers") as post, \ - mock.patch.object(transports.EventarcRestInterceptor, "pre_list_triggers") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = eventarc.ListTriggersRequest.pb(eventarc.ListTriggersRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } + ) - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = eventarc.ListTriggersResponse.to_json(eventarc.ListTriggersResponse()) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = eventarc.ListChannelsRequest() - request = eventarc.ListTriggersRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = eventarc.ListTriggersResponse() + request.parent = 'parent_value' - client.list_triggers(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_channels), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(eventarc.ListChannelsResponse()) + await client.list_channels(request) - pre.assert_called_once() - post.assert_called_once() + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] -def test_list_triggers_rest_bad_request(transport: str = 'rest', request_type=eventarc.ListTriggersRequest): + +def test_list_channels_flattened(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_channels), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = eventarc.ListChannelsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_channels( + parent='parent_value', + ) - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_triggers(request) + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val -def test_list_triggers_rest_flattened(): +def test_list_channels_flattened_error(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = eventarc.ListTriggersResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} - - # get truthy value for each flattened field - mock_args = dict( + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_channels( + eventarc.ListChannelsRequest(), parent='parent_value', ) - mock_args.update(sample_request) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = eventarc.ListTriggersResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value +@pytest.mark.asyncio +async def test_list_channels_flattened_async(): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) - client.list_triggers(**mock_args) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_channels), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(eventarc.ListChannelsResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_channels( + parent='parent_value', + ) # Establish that the underlying call was made with the expected # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/triggers" % client.transport._host, args[1]) - + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val -def test_list_triggers_rest_flattened_error(transport: str = 'rest'): - client = EventarcClient( +@pytest.mark.asyncio +async def test_list_channels_flattened_error_async(): + client = EventarcAsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error.
with pytest.raises(ValueError): - client.list_triggers( - eventarc.ListTriggersRequest(), + await client.list_channels( + eventarc.ListChannelsRequest(), parent='parent_value', ) -def test_list_triggers_rest_pager(transport: str = 'rest'): +def test_list_channels_pager(transport_name: str = "grpc"): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - eventarc.ListTriggersResponse( - triggers=[ - trigger.Trigger(), - trigger.Trigger(), - trigger.Trigger(), + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_channels), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + eventarc.ListChannelsResponse( + channels=[ + channel.Channel(), + channel.Channel(), + channel.Channel(), ], next_page_token='abc', ), - eventarc.ListTriggersResponse( - triggers=[], + eventarc.ListChannelsResponse( + channels=[], next_page_token='def', ), - eventarc.ListTriggersResponse( - triggers=[ - trigger.Trigger(), + eventarc.ListChannelsResponse( + channels=[ + channel.Channel(), ], next_page_token='ghi', ), - eventarc.ListTriggersResponse( - triggers=[ - trigger.Trigger(), - trigger.Trigger(), + eventarc.ListChannelsResponse( + channels=[ + channel.Channel(), + channel.Channel(), ], ), + RuntimeError, ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(eventarc.ListTriggersResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - sample_request = {'parent': 'projects/sample1/locations/sample2'} + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client.list_channels(request={}) - pager = client.list_triggers(request=sample_request) + assert pager._metadata == metadata results = list(pager) assert len(results) == 6 - assert all(isinstance(i, trigger.Trigger) - for i in results) + assert all(isinstance(i, channel.Channel) + for i in results) +def test_list_channels_pages(transport_name: str = "grpc"): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) - pages = list(client.list_triggers(request=sample_request).pages) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_channels), + '__call__') as call: + # Set the response to a series of pages.
+ call.side_effect = ( + eventarc.ListChannelsResponse( + channels=[ + channel.Channel(), + channel.Channel(), + channel.Channel(), + ], + next_page_token='abc', + ), + eventarc.ListChannelsResponse( + channels=[], + next_page_token='def', + ), + eventarc.ListChannelsResponse( + channels=[ + channel.Channel(), + ], + next_page_token='ghi', + ), + eventarc.ListChannelsResponse( + channels=[ + channel.Channel(), + channel.Channel(), + ], + ), + RuntimeError, + ) + pages = list(client.list_channels(request={}).pages) for page_, token in zip(pages, ['abc','def','ghi', '']): assert page_.raw_page.next_page_token == token +@pytest.mark.asyncio +async def test_list_channels_async_pager(): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_channels), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + eventarc.ListChannelsResponse( + channels=[ + channel.Channel(), + channel.Channel(), + channel.Channel(), + ], + next_page_token='abc', + ), + eventarc.ListChannelsResponse( + channels=[], + next_page_token='def', + ), + eventarc.ListChannelsResponse( + channels=[ + channel.Channel(), + ], + next_page_token='ghi', + ), + eventarc.ListChannelsResponse( + channels=[ + channel.Channel(), + channel.Channel(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_channels(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, channel.Channel) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_channels_async_pages(): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_channels), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages.
+ call.side_effect = ( + eventarc.ListChannelsResponse( + channels=[ + channel.Channel(), + channel.Channel(), + channel.Channel(), + ], + next_page_token='abc', + ), + eventarc.ListChannelsResponse( + channels=[], + next_page_token='def', + ), + eventarc.ListChannelsResponse( + channels=[ + channel.Channel(), + ], + next_page_token='ghi', + ), + eventarc.ListChannelsResponse( + channels=[ + channel.Channel(), + channel.Channel(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_channels(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token @pytest.mark.parametrize("request_type", [ - eventarc.CreateTriggerRequest, - dict, + eventarc.CreateChannelRequest, + dict, ]) -def test_create_trigger_rest(request_type): +def test_create_channel(request_type, transport: str = 'grpc'): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport=transport, ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request_init["trigger"] = {'name': 'name_value', 'uid': 'uid_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'event_filters': [{'attribute': 'attribute_value', 'value': 'value_value'}], 'service_account': 'service_account_value', 'destination': {'cloud_run': {'service': 'service_value', 'path': 'path_value', 'region': 'region_value'}}, 'transport': {'pubsub': {'topic': 'topic_value', 'subscription': 'subscription_value'}}, 'labels': {}, 'etag': 'etag_value'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_channel_), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.create_channel(request) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.create_trigger(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.CreateChannelRequest() # Establish that the response is the type that we expect. 
- assert response.operation.name == "operations/spam" - + assert isinstance(response, future.Future) -def test_create_trigger_rest_required_fields(request_type=eventarc.CreateTriggerRequest): - transport_class = transports.EventarcRestTransport - request_init = {} - request_init["parent"] = "" - request_init["trigger_id"] = "" - request_init["validate_only"] = False - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) +def test_create_channel_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) - # verify fields with default values are dropped - assert "triggerId" not in jsonified_request - assert "validateOnly" not in jsonified_request - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_trigger._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - assert "triggerId" in jsonified_request - assert jsonified_request["triggerId"] == request_init["trigger_id"] - assert "validateOnly" in jsonified_request - assert jsonified_request["validateOnly"] == request_init["validate_only"] - - jsonified_request["parent"] = 'parent_value' - jsonified_request["triggerId"] = 'trigger_id_value' - jsonified_request["validateOnly"] = True - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_trigger._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("trigger_id", "validate_only", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - assert "triggerId" in jsonified_request - assert jsonified_request["triggerId"] == 'trigger_id_value' - assert "validateOnly" in jsonified_request - assert jsonified_request["validateOnly"] == True + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_channel_), + '__call__') as call: + client.create_channel() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.CreateChannelRequest() - client = EventarcClient( +@pytest.mark.asyncio +async def test_create_channel_async(transport: str = 'grpc_asyncio', request_type=eventarc.CreateChannelRequest): + client = EventarcAsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport=transport, ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - response = client.create_trigger(request) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_channel_), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.create_channel(request) - expected_params = [ - ( - "triggerId", - "", - ), - ( - "validateOnly", - str(False).lower(), - ), - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.CreateChannelRequest() + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) -def test_create_trigger_rest_unset_required_fields(): - transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) - unset_fields = transport.create_trigger._get_unset_required_fields({}) - assert set(unset_fields) == (set(("triggerId", "validateOnly", )) & set(("parent", "trigger", "triggerId", "validateOnly", ))) +@pytest.mark.asyncio +async def test_create_channel_async_from_dict(): + await test_create_channel_async(request_type=dict) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_trigger_rest_interceptors(null_interceptor): - transport = transports.EventarcRestTransport( +def test_create_channel_field_headers(): + client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), - ) - client = EventarcClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.EventarcRestInterceptor, "post_create_trigger") as post, \ - mock.patch.object(transports.EventarcRestInterceptor, "pre_create_trigger") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = eventarc.CreateTriggerRequest.pb(eventarc.CreateTriggerRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } + ) - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field 
header. Set these to a non-empty value. + request = eventarc.CreateChannelRequest() - request = eventarc.CreateTriggerRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + request.parent = 'parent_value' - client.create_trigger(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_channel_), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_channel(request) - pre.assert_called_once() - post.assert_called_once() + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] -def test_create_trigger_rest_bad_request(transport: str = 'rest', request_type=eventarc.CreateTriggerRequest): - client = EventarcClient( + +@pytest.mark.asyncio +async def test_create_channel_field_headers_async(): + client = EventarcAsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request_init["trigger"] = {'name': 'name_value', 'uid': 'uid_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'event_filters': [{'attribute': 'attribute_value', 'value': 'value_value'}], 'service_account': 'service_account_value', 'destination': {'cloud_run': {'service': 'service_value', 'path': 'path_value', 'region': 'region_value'}}, 'transport': {'pubsub': {'topic': 'topic_value', 'subscription': 'subscription_value'}}, 'labels': {}, 'etag': 'etag_value'} - request = request_type(**request_init) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = eventarc.CreateChannelRequest() - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.create_trigger(request) + request.parent = 'parent_value' + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_channel_), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.create_channel(request) -def test_create_trigger_rest_flattened(): + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_create_channel_flattened(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} - - # get truthy value for each flattened field - mock_args = dict( + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_channel_), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_channel( parent='parent_value', - trigger=gce_trigger.Trigger(name='name_value'), - trigger_id='trigger_id_value', + channel=gce_channel.Channel(name='name_value'), + channel_id='channel_id_value', ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.create_trigger(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/triggers" % client.transport._host, args[1]) + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].channel + mock_val = gce_channel.Channel(name='name_value') + assert arg == mock_val + arg = args[0].channel_id + mock_val = 'channel_id_value' + assert arg == mock_val -def test_create_trigger_rest_flattened_error(transport: str = 'rest'): +def test_create_channel_flattened_error(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_trigger( - eventarc.CreateTriggerRequest(), + client.create_channel( + eventarc.CreateChannelRequest(), parent='parent_value', - trigger=gce_trigger.Trigger(name='name_value'), - trigger_id='trigger_id_value', + channel=gce_channel.Channel(name='name_value'), + channel_id='channel_id_value', ) - -def test_create_trigger_rest_error(): - client = EventarcClient( +@pytest.mark.asyncio +async def test_create_channel_flattened_async(): + client = EventarcAsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest' ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_channel_), + '__call__') as call: + # Designate an appropriate return value for the call. 
-@pytest.mark.parametrize("request_type", [ - eventarc.UpdateTriggerRequest, - dict, -]) -def test_update_trigger_rest(request_type): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'trigger': {'name': 'projects/sample1/locations/sample2/triggers/sample3'}} - request_init["trigger"] = {'name': 'projects/sample1/locations/sample2/triggers/sample3', 'uid': 'uid_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'event_filters': [{'attribute': 'attribute_value', 'value': 'value_value'}], 'service_account': 'service_account_value', 'destination': {'cloud_run': {'service': 'service_value', 'path': 'path_value', 'region': 'region_value'}}, 'transport': {'pubsub': {'topic': 'topic_value', 'subscription': 'subscription_value'}}, 'labels': {}, 'etag': 'etag_value'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.update_trigger(request) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_channel( + parent='parent_value', + channel=gce_channel.Channel(name='name_value'), + channel_id='channel_id_value', + ) - # Establish that the response is the type that we expect. - assert response.operation.name == "operations/spam" + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].channel + mock_val = gce_channel.Channel(name='name_value') + assert arg == mock_val + arg = args[0].channel_id + mock_val = 'channel_id_value' + assert arg == mock_val +@pytest.mark.asyncio +async def test_create_channel_flattened_error_async(): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Attempting to call a method with both a request object and flattened + # fields is an error.
+ with pytest.raises(ValueError): + await client.create_channel( + eventarc.CreateChannelRequest(), + parent='parent_value', + channel=gce_channel.Channel(name='name_value'), + channel_id='channel_id_value', + ) - request_init = {} - request_init["validate_only"] = False - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - # verify fields with default values are dropped - assert "validateOnly" not in jsonified_request +@pytest.mark.parametrize("request_type", [ + eventarc.UpdateChannelRequest, + dict, +]) +def test_update_channel(request_type, transport: str = 'grpc'): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_trigger._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - # verify required fields with default values are now present - assert "validateOnly" in jsonified_request - assert jsonified_request["validateOnly"] == request_init["validate_only"] + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_channel), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.update_channel(request) - jsonified_request["validateOnly"] = True + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.UpdateChannelRequest() - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_trigger._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("allow_missing", "update_mask", "validate_only", )) - jsonified_request.update(unset_fields) + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) - # verify required fields with non-default values are left alone - assert "validateOnly" in jsonified_request - assert jsonified_request["validateOnly"] == True +def test_update_channel_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport='grpc', ) - request = request_type(**request_init) - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "patch", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_channel), + '__call__') as call: + client.update_channel() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.UpdateChannelRequest() - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) +@pytest.mark.asyncio +async def test_update_channel_async(transport: str = 'grpc_asyncio', request_type=eventarc.UpdateChannelRequest): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - response = client.update_trigger(request) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_channel), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.update_channel(request) - expected_params = [ - ( - "validateOnly", - str(False).lower(), - ), - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.UpdateChannelRequest() + # Establish that the response is the type that we expect. 
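+ # update_channel is a long-running operation: the stub yields a raw
+ # operations_pb2.Operation, and the client wraps it in
+ # google.api_core.operation.Operation, which implements the
+ # google.api_core.future.Future interface asserted below. A hedged,
+ # self-contained illustration of that wrapping (the mock refresh/cancel
+ # callables and the Empty result type are stand-ins, not what the real
+ # client passes):
+ #
+ #     from unittest import mock
+ #     from google.api_core import future, operation
+ #     from google.longrunning import operations_pb2
+ #     from google.protobuf import empty_pb2
+ #
+ #     op_proto = operations_pb2.Operation(name='operations/spam')
+ #     lro = operation.Operation(
+ #         op_proto,
+ #         refresh=mock.Mock(return_value=op_proto),
+ #         cancel=mock.Mock(),
+ #         result_type=empty_pb2.Empty,
+ #     )
+ #     assert isinstance(lro, future.Future)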
+ assert isinstance(response, future.Future) -def test_update_trigger_rest_unset_required_fields(): - transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) - unset_fields = transport.update_trigger._get_unset_required_fields({}) - assert set(unset_fields) == (set(("allowMissing", "updateMask", "validateOnly", )) & set(("validateOnly", ))) +@pytest.mark.asyncio +async def test_update_channel_async_from_dict(): + await test_update_channel_async(request_type=dict) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_trigger_rest_interceptors(null_interceptor): - transport = transports.EventarcRestTransport( +def test_update_channel_field_headers(): + client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), - ) - client = EventarcClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.EventarcRestInterceptor, "post_update_trigger") as post, \ - mock.patch.object(transports.EventarcRestInterceptor, "pre_update_trigger") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = eventarc.UpdateTriggerRequest.pb(eventarc.UpdateTriggerRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } + ) - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = eventarc.UpdateChannelRequest() - request = eventarc.UpdateTriggerRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + request.channel.name = 'name_value' - client.update_trigger(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_channel), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.update_channel(request) - pre.assert_called_once() - post.assert_called_once() + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + # Establish that the field header was sent. 
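+ # The metadata pair checked below is built by
+ # google.api_core.gapic_v1.routing_header from the request fields named in
+ # the method's routing annotation (values are URL-encoded):
+ #
+ #     from google.api_core.gapic_v1 import routing_header
+ #
+ #     routing_header.to_grpc_metadata((('channel.name', 'name_value'),))
+ #     # -> ('x-goog-request-params', 'channel.name=name_value')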
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'channel.name=name_value', + ) in kw['metadata'] -def test_update_trigger_rest_bad_request(transport: str = 'rest', request_type=eventarc.UpdateTriggerRequest): - client = EventarcClient( + +@pytest.mark.asyncio +async def test_update_channel_field_headers_async(): + client = EventarcAsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) - # send a request that will satisfy transcoding - request_init = {'trigger': {'name': 'projects/sample1/locations/sample2/triggers/sample3'}} - request_init["trigger"] = {'name': 'projects/sample1/locations/sample2/triggers/sample3', 'uid': 'uid_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'event_filters': [{'attribute': 'attribute_value', 'value': 'value_value'}], 'service_account': 'service_account_value', 'destination': {'cloud_run': {'service': 'service_value', 'path': 'path_value', 'region': 'region_value'}}, 'transport': {'pubsub': {'topic': 'topic_value', 'subscription': 'subscription_value'}}, 'labels': {}, 'etag': 'etag_value'} - request = request_type(**request_init) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = eventarc.UpdateChannelRequest() - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.update_trigger(request) + request.channel.name = 'name_value' + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_channel), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.update_channel(request) -def test_update_trigger_rest_flattened(): + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'channel.name=name_value', + ) in kw['metadata'] + + +def test_update_channel_flattened(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'trigger': {'name': 'projects/sample1/locations/sample2/triggers/sample3'}} - - # get truthy value for each flattened field - mock_args = dict( - trigger=gce_trigger.Trigger(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - allow_missing=True, + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_channel), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_channel( + channel=gce_channel.Channel(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.update_trigger(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{trigger.name=projects/*/locations/*/triggers/*}" % client.transport._host, args[1]) + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].channel + mock_val = gce_channel.Channel(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val -def test_update_trigger_rest_flattened_error(transport: str = 'rest'): +def test_update_channel_flattened_error(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.update_trigger( - eventarc.UpdateTriggerRequest(), - trigger=gce_trigger.Trigger(name='name_value'), + client.update_channel( + eventarc.UpdateChannelRequest(), + channel=gce_channel.Channel(name='name_value'), update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - allow_missing=True, ) +@pytest.mark.asyncio +async def test_update_channel_flattened_async(): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) -def test_update_trigger_rest_error(): - client = EventarcClient( + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_channel), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_channel( + channel=gce_channel.Channel(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].channel + mock_val = gce_channel.Channel(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + +@pytest.mark.asyncio +async def test_update_channel_flattened_error_async(): + client = EventarcAsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest' ) + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.update_channel( + eventarc.UpdateChannelRequest(), + channel=gce_channel.Channel(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + @pytest.mark.parametrize("request_type", [ - eventarc.DeleteTriggerRequest, - dict, + eventarc.DeleteChannelRequest, + dict, ]) -def test_delete_trigger_rest(request_type): +def test_delete_channel(request_type, transport: str = 'grpc'): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport=transport, ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/triggers/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_channel), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.delete_channel(request) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.delete_trigger(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.DeleteChannelRequest() # Establish that the response is the type that we expect. - assert response.operation.name == "operations/spam" + assert isinstance(response, future.Future) -def test_delete_trigger_rest_required_fields(request_type=eventarc.DeleteTriggerRequest): - transport_class = transports.EventarcRestTransport +def test_delete_channel_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) - request_init = {} - request_init["name"] = "" - request_init["validate_only"] = False - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) + # Mock the actual call within the gRPC stub, and fake the request. 
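+ # The equality assertion a few lines below works because protobuf and
+ # proto-plus messages compare by field value, not identity, so a request
+ # built from all defaults matches a freshly constructed one:
+ #
+ #     assert eventarc.DeleteChannelRequest() == eventarc.DeleteChannelRequest()
+ #     assert eventarc.DeleteChannelRequest(name='a') != eventarc.DeleteChannelRequest()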
+ with mock.patch.object( + type(client.transport.delete_channel), + '__call__') as call: + client.delete_channel() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.DeleteChannelRequest() - # verify fields with default values are dropped - assert "validateOnly" not in jsonified_request +@pytest.mark.asyncio +async def test_delete_channel_async(transport: str = 'grpc_asyncio', request_type=eventarc.DeleteChannelRequest): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_trigger._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - # verify required fields with default values are now present - assert "validateOnly" in jsonified_request - assert jsonified_request["validateOnly"] == request_init["validate_only"] + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_channel), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.delete_channel(request) - jsonified_request["name"] = 'name_value' - jsonified_request["validateOnly"] = True + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.DeleteChannelRequest() - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_trigger._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("allow_missing", "etag", "validate_only", )) - jsonified_request.update(unset_fields) + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_channel_async_from_dict(): + await test_delete_channel_async(request_type=dict) - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - assert "validateOnly" in jsonified_request - assert jsonified_request["validateOnly"] == True +def test_delete_channel_field_headers(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', ) - request = request_type(**request_init) - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "delete", - 'query_params': pb_request, - } - transcode.return_value = transcode_result + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = eventarc.DeleteChannelRequest() - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) + request.name = 'name_value' - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_channel), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.delete_channel(request) - response = client.delete_trigger(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request - expected_params = [ - ( - "validateOnly", - str(False).lower(), - ), - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] -def test_delete_trigger_rest_unset_required_fields(): - transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) +@pytest.mark.asyncio +async def test_delete_channel_field_headers_async(): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) - unset_fields = transport.delete_trigger._get_unset_required_fields({}) - assert set(unset_fields) == (set(("allowMissing", "etag", "validateOnly", )) & set(("name", "validateOnly", ))) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = eventarc.DeleteChannelRequest() + request.name = 'name_value' -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_trigger_rest_interceptors(null_interceptor): - transport = transports.EventarcRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), - ) - client = EventarcClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.EventarcRestInterceptor, "post_delete_trigger") as post, \ - mock.patch.object(transports.EventarcRestInterceptor, "pre_delete_trigger") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = eventarc.DeleteTriggerRequest.pb(eventarc.DeleteTriggerRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_channel), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.delete_channel(request) - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) - - request = eventarc.DeleteTriggerRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - - client.delete_trigger(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request - pre.assert_called_once() - post.assert_called_once() + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] -def test_delete_trigger_rest_bad_request(transport: str = 'rest', request_type=eventarc.DeleteTriggerRequest): +def test_delete_channel_flattened(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/triggers/sample3'} - request = request_type(**request_init) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_channel), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_channel( + name='name_value', + ) - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_trigger(request) + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val -def test_delete_trigger_rest_flattened(): +def test_delete_channel_flattened_error(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/triggers/sample3'} - - # get truthy value for each flattened field - mock_args = dict( + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_channel( + eventarc.DeleteChannelRequest(), name='name_value', - allow_missing=True, ) - mock_args.update(sample_request) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value +@pytest.mark.asyncio +async def test_delete_channel_flattened_async(): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) - client.delete_trigger(**mock_args) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_channel), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_channel( + name='name_value', + ) # Establish that the underlying call was made with the expected # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/triggers/*}" % client.transport._host, args[1]) - + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val -def test_delete_trigger_rest_flattened_error(transport: str = 'rest'): - client = EventarcClient( +@pytest.mark.asyncio +async def test_delete_channel_flattened_error_async(): + client = EventarcAsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.delete_trigger( - eventarc.DeleteTriggerRequest(), + await client.delete_channel( + eventarc.DeleteChannelRequest(), name='name_value', - allow_missing=True, ) -def test_delete_trigger_rest_error(): +@pytest.mark.parametrize("request_type", [ + eventarc.GetProviderRequest, + dict, +]) +def test_get_provider(request_type, transport: str = 'grpc'): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest' + transport=transport, ) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.EventarcGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_provider), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = discovery.Provider( + name='name_value', + display_name='display_name_value', ) + response = client.get_provider(request) - # It is an error to provide a credentials file and a transport instance. 
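+ # A note on the credentials used everywhere in these tests:
+ # AnonymousCredentials never triggers an ADC lookup or a token refresh and
+ # attaches no Authorization header, which keeps the clients fully offline:
+ #
+ #     from google.auth.credentials import AnonymousCredentials
+ #
+ #     creds = AnonymousCredentials()
+ #     assert creds.valid       # always valid, never refreshed
+ #     headers = {}
+ #     creds.apply(headers)     # no-op: nothing is attached
+ #     assert headers == {}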
- transport = transports.EventarcGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = EventarcClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.GetProviderRequest() - # It is an error to provide an api_key and a transport instance. - transport = transports.EventarcGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = EventarcClient( - client_options=options, - transport=transport, - ) + # Establish that the response is the type that we expect. + assert isinstance(response, discovery.Provider) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' - # It is an error to provide an api_key and a credential. - options = mock.Mock() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = EventarcClient( - client_options=options, - credentials=ga_credentials.AnonymousCredentials() - ) - # It is an error to provide scopes and a transport instance. - transport = transports.EventarcGrpcTransport( +def test_get_provider_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', ) - with pytest.raises(ValueError): - client = EventarcClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_provider), + '__call__') as call: + client.get_provider() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.GetProviderRequest() -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.EventarcGrpcTransport( +@pytest.mark.asyncio +async def test_get_provider_async(transport: str = 'grpc_asyncio', request_type=eventarc.GetProviderRequest): + client = EventarcAsyncClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - client = EventarcClient(transport=transport) - assert client.transport is transport -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.EventarcGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - transport = transports.EventarcGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_provider), + '__call__') as call: + # Designate an appropriate return value for the call. 
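+ # For async transports the mocked stub must return an awaitable, which is
+ # what grpc_helpers_async.FakeUnaryUnaryCall supplies: a grpc.aio-style
+ # call object already resolved to the given response. A rough standalone
+ # equivalent (illustrative only; the real helper lives in
+ # google.api_core.grpc_helpers_async):
+ #
+ #     class FakeCall:
+ #         def __init__(self, response):
+ #             self._response = response
+ #         def __await__(self):
+ #             async def _resolve():
+ #                 return self._response
+ #             return _resolve().__await__()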
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(discovery.Provider(
+ name='name_value',
+ display_name='display_name_value',
+ ))
+ response = await client.get_provider(request)
-@pytest.mark.parametrize("transport_class", [
- transports.EventarcGrpcTransport,
- transports.EventarcGrpcAsyncIOTransport,
- transports.EventarcRestTransport,
-])
-def test_transport_adc(transport_class):
- # Test default credentials are used if not provided.
- with mock.patch.object(google.auth, 'default') as adc:
- adc.return_value = (ga_credentials.AnonymousCredentials(), None)
- transport_class()
- adc.assert_called_once()
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == eventarc.GetProviderRequest()
-@pytest.mark.parametrize("transport_name", [
- "grpc",
- "rest",
-])
-def test_transport_kind(transport_name):
- transport = EventarcClient.get_transport_class(transport_name)(
- credentials=ga_credentials.AnonymousCredentials(),
- )
- assert transport.kind == transport_name
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, discovery.Provider)
+ assert response.name == 'name_value'
+ assert response.display_name == 'display_name_value'
-def test_transport_grpc_default():
- # A client should use the gRPC transport by default.
+
+@pytest.mark.asyncio
+async def test_get_provider_async_from_dict():
+ await test_get_provider_async(request_type=dict)
+
+
+def test_get_provider_field_headers():
 client = EventarcClient(
 credentials=ga_credentials.AnonymousCredentials(),
 )
- assert isinstance(
- client.transport,
- transports.EventarcGrpcTransport,
- )
-def test_eventarc_base_transport_error():
- # Passing both a credentials object and credentials_file should raise an error
- with pytest.raises(core_exceptions.DuplicateCredentialArgs):
- transport = transports.EventarcTransport(
- credentials=ga_credentials.AnonymousCredentials(),
- credentials_file="credentials.json"
- )
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = eventarc.GetProviderRequest()
+ request.name = 'name_value'
-def test_eventarc_base_transport():
- # Instantiate the base transport.
- with mock.patch('google.cloud.eventarc_v1.services.eventarc.transports.EventarcTransport.__init__') as Transport:
- Transport.return_value = None
- transport = transports.EventarcTransport(
- credentials=ga_credentials.AnonymousCredentials(),
- )
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_provider),
+ '__call__') as call:
+ call.return_value = discovery.Provider()
+ client.get_provider(request)
- # Every method on the transport should just blindly
- # raise NotImplementedError.
- methods = (
- 'get_trigger',
- 'list_triggers',
- 'create_trigger',
- 'update_trigger',
- 'delete_trigger',
- )
- for method in methods:
- with pytest.raises(NotImplementedError):
- getattr(transport, method)(request=object())
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
- with pytest.raises(NotImplementedError):
- transport.close()
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] - # Additionally, the LRO client (a property) should - # also raise NotImplementedError - with pytest.raises(NotImplementedError): - transport.operations_client - # Catch all for all remaining methods and properties - remainder = [ - 'kind', - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() +@pytest.mark.asyncio +async def test_get_provider_field_headers_async(): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = eventarc.GetProviderRequest() -def test_eventarc_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.eventarc_v1.services.eventarc.transports.EventarcTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.EventarcTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id="octopus", - ) + request.name = 'name_value' + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_provider), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(discovery.Provider()) + await client.get_provider(request) -def test_eventarc_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.eventarc_v1.services.eventarc.transports.EventarcTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.EventarcTransport() - adc.assert_called_once() + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] -def test_eventarc_auth_adc(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - EventarcClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id=None, - ) +def test_get_provider_flattened(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) -@pytest.mark.parametrize( - "transport_class", - [ - transports.EventarcGrpcTransport, - transports.EventarcGrpcAsyncIOTransport, - ], -) -def test_eventarc_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
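+ # "ADC" is Application Default Credentials: google.auth.default() searches
+ # the environment (GOOGLE_APPLICATION_CREDENTIALS, gcloud config, the GCE
+ # metadata server) for credentials. The tests stub it out so no real
+ # environment is ever consulted:
+ #
+ #     from unittest import mock
+ #     import google.auth
+ #     from google.auth import credentials as ga_credentials
+ #
+ #     with mock.patch.object(google.auth, 'default', autospec=True) as adc:
+ #         adc.return_value = (ga_credentials.AnonymousCredentials(), None)
+ #         creds, project = google.auth.default()  # no environment lookup
+ #         adc.assert_called_once()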
- with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=( 'https://www.googleapis.com/auth/cloud-platform',), - quota_project_id="octopus", + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_provider), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = discovery.Provider() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_provider( + name='name_value', ) + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val -@pytest.mark.parametrize( - "transport_class", - [ - transports.EventarcGrpcTransport, - transports.EventarcGrpcAsyncIOTransport, - transports.EventarcRestTransport, - ], -) -def test_eventarc_transport_auth_gdch_credentials(transport_class): - host = 'https://language.com' - api_audience_tests = [None, 'https://language2.com'] - api_audience_expect = [host, 'https://language2.com'] - for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) - adc.return_value = (gdch_mock, None) - transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with( - e - ) +def test_get_provider_flattened_error(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.EventarcGrpcTransport, grpc_helpers), - (transports.EventarcGrpcAsyncIOTransport, grpc_helpers_async) - ], -) -def test_eventarc_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class( - quota_project_id="octopus", - scopes=["1", "2"] + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_provider( + eventarc.GetProviderRequest(), + name='name_value', ) - create_channel.assert_called_with( - "eventarc.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=["1", "2"], - default_host="eventarc.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], +@pytest.mark.asyncio +async def test_get_provider_flattened_async(): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_provider), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = discovery.Provider() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(discovery.Provider()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_provider( + name='name_value', ) + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val -@pytest.mark.parametrize("transport_class", [transports.EventarcGrpcTransport, transports.EventarcGrpcAsyncIOTransport]) -def test_eventarc_grpc_transport_client_cert_source_for_mtls( - transport_class -): - cred = ga_credentials.AnonymousCredentials() +@pytest.mark.asyncio +async def test_get_provider_flattened_error_async(): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) - # Check ssl_channel_credentials is used if provided. - with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_provider( + eventarc.GetProviderRequest(), + name='name_value', ) - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. - with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): - with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: - transport_class( - credentials=cred, + +@pytest.mark.parametrize("request_type", [ + eventarc.ListProvidersRequest, + dict, +]) +def test_list_providers(request_type, transport: str = 'grpc'): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_providers), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = eventarc.ListProvidersResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + ) + response = client.list_providers(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.ListProvidersRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListProvidersPager)
+ assert response.next_page_token == 'next_page_token_value'
+ assert response.unreachable == ['unreachable_value']
+
+
+def test_list_providers_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = EventarcClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport='grpc',
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_providers),
+ '__call__') as call:
+ client.list_providers()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == eventarc.ListProvidersRequest()
+
+@pytest.mark.asyncio
+async def test_list_providers_async(transport: str = 'grpc_asyncio', request_type=eventarc.ListProvidersRequest):
+ client = EventarcAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_providers),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(eventarc.ListProvidersResponse(
+ next_page_token='next_page_token_value',
+ unreachable=['unreachable_value'],
+ ))
+ response = await client.list_providers(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == eventarc.ListProvidersRequest()
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, pagers.ListProvidersAsyncPager)
+ assert response.next_page_token == 'next_page_token_value'
+ assert response.unreachable == ['unreachable_value']
+
+
+@pytest.mark.asyncio
+async def test_list_providers_async_from_dict():
+ await test_list_providers_async(request_type=dict)
+
+
+def test_list_providers_field_headers():
+ client = EventarcClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = eventarc.ListProvidersRequest()
+
+ request.parent = 'parent_value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_providers),
+ '__call__') as call:
+ call.return_value = eventarc.ListProvidersResponse()
+ client.list_providers(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'parent=parent_value',
+ ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_list_providers_field_headers_async():
+ client = EventarcAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = eventarc.ListProvidersRequest()
+
+ request.parent = 'parent_value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object( + type(client.transport.list_providers), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(eventarc.ListProvidersResponse()) + await client.list_providers(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_list_providers_flattened(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_providers), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = eventarc.ListProvidersResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_providers( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + + +def test_list_providers_flattened_error(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_providers( + eventarc.ListProvidersRequest(), + parent='parent_value', + ) + +@pytest.mark.asyncio +async def test_list_providers_flattened_async(): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_providers), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = eventarc.ListProvidersResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(eventarc.ListProvidersResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_providers( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_list_providers_flattened_error_async(): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_providers( + eventarc.ListProvidersRequest(), + parent='parent_value', + ) + + +def test_list_providers_pager(transport_name: str = "grpc"): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_providers), + '__call__') as call: + # Set the response to a series of pages. 
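+ # side_effect (rather than return_value) makes the mock yield a different
+ # response on each invocation, simulating successive page fetches; the
+ # trailing RuntimeError is a sentinel that fails loudly if the pager ever
+ # requests a page past the final (empty-token) one. Schematically:
+ #
+ #     from unittest import mock
+ #
+ #     fetch = mock.Mock(side_effect=[
+ #         {'items': [1, 2], 'token': 'abc'},
+ #         {'items': [3], 'token': ''},   # empty token: last page
+ #         RuntimeError('pager should have stopped'),
+ #     ])
+ #     items, token = [], None
+ #     while True:
+ #         page = fetch(token)
+ #         items.extend(page['items'])
+ #         token = page['token']
+ #         if not token:
+ #             break
+ #     assert items == [1, 2, 3] and fetch.call_count == 2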
+ call.side_effect = ( + eventarc.ListProvidersResponse( + providers=[ + discovery.Provider(), + discovery.Provider(), + discovery.Provider(), + ], + next_page_token='abc', + ), + eventarc.ListProvidersResponse( + providers=[], + next_page_token='def', + ), + eventarc.ListProvidersResponse( + providers=[ + discovery.Provider(), + ], + next_page_token='ghi', + ), + eventarc.ListProvidersResponse( + providers=[ + discovery.Provider(), + discovery.Provider(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client.list_providers(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, discovery.Provider) + for i in results) +def test_list_providers_pages(transport_name: str = "grpc"): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_providers), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + eventarc.ListProvidersResponse( + providers=[ + discovery.Provider(), + discovery.Provider(), + discovery.Provider(), + ], + next_page_token='abc', + ), + eventarc.ListProvidersResponse( + providers=[], + next_page_token='def', + ), + eventarc.ListProvidersResponse( + providers=[ + discovery.Provider(), + ], + next_page_token='ghi', + ), + eventarc.ListProvidersResponse( + providers=[ + discovery.Provider(), + discovery.Provider(), + ], + ), + RuntimeError, + ) + pages = list(client.list_providers(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_providers_async_pager(): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_providers), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + eventarc.ListProvidersResponse( + providers=[ + discovery.Provider(), + discovery.Provider(), + discovery.Provider(), + ], + next_page_token='abc', + ), + eventarc.ListProvidersResponse( + providers=[], + next_page_token='def', + ), + eventarc.ListProvidersResponse( + providers=[ + discovery.Provider(), + ], + next_page_token='ghi', + ), + eventarc.ListProvidersResponse( + providers=[ + discovery.Provider(), + discovery.Provider(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_providers(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, discovery.Provider) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_providers_async_pages(): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_providers), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
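+ # The async pager tests patch __call__ with new_callable=mock.AsyncMock so
+ # the mocked stub itself returns a coroutine; a plain MagicMock returns a
+ # non-awaitable by default, which is why the other async tests instead wrap
+ # their canned responses in FakeUnaryUnaryCall. Minimal illustration:
+ #
+ #     import asyncio
+ #     from unittest import mock
+ #
+ #     class Svc:
+ #         async def fetch(self):
+ #             raise RuntimeError('network')
+ #
+ #     async def main():
+ #         with mock.patch.object(Svc, 'fetch', new_callable=mock.AsyncMock) as m:
+ #             m.return_value = 'page'
+ #             assert await Svc().fetch() == 'page'
+ #
+ #     asyncio.run(main())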
+ call.side_effect = ( + eventarc.ListProvidersResponse( + providers=[ + discovery.Provider(), + discovery.Provider(), + discovery.Provider(), + ], + next_page_token='abc', + ), + eventarc.ListProvidersResponse( + providers=[], + next_page_token='def', + ), + eventarc.ListProvidersResponse( + providers=[ + discovery.Provider(), + ], + next_page_token='ghi', + ), + eventarc.ListProvidersResponse( + providers=[ + discovery.Provider(), + discovery.Provider(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_providers(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + eventarc.GetChannelConnectionRequest, + dict, +]) +def test_get_channel_connection(request_type, transport: str = 'grpc'): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_channel_connection), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = channel_connection.ChannelConnection( + name='name_value', + uid='uid_value', + channel='channel_value', + activation_token='activation_token_value', + ) + response = client.get_channel_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.GetChannelConnectionRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, channel_connection.ChannelConnection) + assert response.name == 'name_value' + assert response.uid == 'uid_value' + assert response.channel == 'channel_value' + assert response.activation_token == 'activation_token_value' + + +def test_get_channel_connection_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_channel_connection), + '__call__') as call: + client.get_channel_connection() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.GetChannelConnectionRequest() + +@pytest.mark.asyncio +async def test_get_channel_connection_async(transport: str = 'grpc_asyncio', request_type=eventarc.GetChannelConnectionRequest): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
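+    # Patching '__call__' on type(...) swaps out the behavior of the cached
+    # multicallable on the transport, so the client's normal RPC path is
+    # exercised while the network call itself is faked.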
+    with mock.patch.object(
+            type(client.transport.get_channel_connection),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(channel_connection.ChannelConnection(
+            name='name_value',
+            uid='uid_value',
+            channel='channel_value',
+            activation_token='activation_token_value',
+        ))
+        response = await client.get_channel_connection(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == eventarc.GetChannelConnectionRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, channel_connection.ChannelConnection)
+    assert response.name == 'name_value'
+    assert response.uid == 'uid_value'
+    assert response.channel == 'channel_value'
+    assert response.activation_token == 'activation_token_value'
+
+
+@pytest.mark.asyncio
+async def test_get_channel_connection_async_from_dict():
+    await test_get_channel_connection_async(request_type=dict)
+
+
+def test_get_channel_connection_field_headers():
+    client = EventarcClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = eventarc.GetChannelConnectionRequest()
+
+    request.name = 'name_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_channel_connection),
+            '__call__') as call:
+        call.return_value = channel_connection.ChannelConnection()
+        client.get_channel_connection(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'name=name_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_get_channel_connection_field_headers_async():
+    client = EventarcAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = eventarc.GetChannelConnectionRequest()
+
+    request.name = 'name_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_channel_connection),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(channel_connection.ChannelConnection())
+        await client.get_channel_connection(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'name=name_value',
+    ) in kw['metadata']
+
+
+def test_get_channel_connection_flattened():
+    client = EventarcClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_channel_connection),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = channel_connection.ChannelConnection()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.get_channel_connection(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+
+def test_get_channel_connection_flattened_error():
+    client = EventarcClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.get_channel_connection(
+            eventarc.GetChannelConnectionRequest(),
+            name='name_value',
+        )
+
+
+@pytest.mark.asyncio
+async def test_get_channel_connection_flattened_async():
+    client = EventarcAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_channel_connection),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(channel_connection.ChannelConnection())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.get_channel_connection(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_get_channel_connection_flattened_error_async():
+    client = EventarcAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.get_channel_connection(
+            eventarc.GetChannelConnectionRequest(),
+            name='name_value',
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+    eventarc.ListChannelConnectionsRequest,
+    dict,
+])
+def test_list_channel_connections(request_type, transport: str = 'grpc'):
+    client = EventarcClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_channel_connections),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = eventarc.ListChannelConnectionsResponse(
+            next_page_token='next_page_token_value',
+            unreachable=['unreachable_value'],
+        )
+        response = client.list_channel_connections(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == eventarc.ListChannelConnectionsRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.ListChannelConnectionsPager)
+    assert response.next_page_token == 'next_page_token_value'
+    assert response.unreachable == ['unreachable_value']
+
+
+def test_list_channel_connections_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = EventarcClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_channel_connections),
+            '__call__') as call:
+        client.list_channel_connections()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == eventarc.ListChannelConnectionsRequest()
+
+
+@pytest.mark.asyncio
+async def test_list_channel_connections_async(transport: str = 'grpc_asyncio', request_type=eventarc.ListChannelConnectionsRequest):
+    client = EventarcAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_channel_connections),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(eventarc.ListChannelConnectionsResponse(
+            next_page_token='next_page_token_value',
+            unreachable=['unreachable_value'],
+        ))
+        response = await client.list_channel_connections(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == eventarc.ListChannelConnectionsRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.ListChannelConnectionsAsyncPager)
+    assert response.next_page_token == 'next_page_token_value'
+    assert response.unreachable == ['unreachable_value']
+
+
+@pytest.mark.asyncio
+async def test_list_channel_connections_async_from_dict():
+    await test_list_channel_connections_async(request_type=dict)
+
+
+def test_list_channel_connections_field_headers():
+    client = EventarcClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = eventarc.ListChannelConnectionsRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_channel_connections),
+            '__call__') as call:
+        call.return_value = eventarc.ListChannelConnectionsResponse()
+        client.list_channel_connections(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_list_channel_connections_field_headers_async():
+    client = EventarcAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = eventarc.ListChannelConnectionsRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
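+    # FakeUnaryUnaryCall (a test helper in google.api_core.grpc_helpers_async)
+    # wraps a plain response message in an awaitable, mimicking the call object
+    # a real async gRPC stub would return.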
+    with mock.patch.object(
+            type(client.transport.list_channel_connections),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(eventarc.ListChannelConnectionsResponse())
+        await client.list_channel_connections(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent_value',
+    ) in kw['metadata']
+
+
+def test_list_channel_connections_flattened():
+    client = EventarcClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_channel_connections),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = eventarc.ListChannelConnectionsResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.list_channel_connections(
+            parent='parent_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+
+
+def test_list_channel_connections_flattened_error():
+    client = EventarcClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_channel_connections(
+            eventarc.ListChannelConnectionsRequest(),
+            parent='parent_value',
+        )
+
+
+@pytest.mark.asyncio
+async def test_list_channel_connections_flattened_async():
+    client = EventarcAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_channel_connections),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(eventarc.ListChannelConnectionsResponse())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_channel_connections(
+            parent='parent_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_list_channel_connections_flattened_error_async():
+    client = EventarcAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_channel_connections(
+            eventarc.ListChannelConnectionsRequest(),
+            parent='parent_value',
+        )
+
+
+def test_list_channel_connections_pager(transport_name: str = "grpc"):
+    client = EventarcClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_channel_connections),
+            '__call__') as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            eventarc.ListChannelConnectionsResponse(
+                channel_connections=[
+                    channel_connection.ChannelConnection(),
+                    channel_connection.ChannelConnection(),
+                    channel_connection.ChannelConnection(),
+                ],
+                next_page_token='abc',
+            ),
+            eventarc.ListChannelConnectionsResponse(
+                channel_connections=[],
+                next_page_token='def',
+            ),
+            eventarc.ListChannelConnectionsResponse(
+                channel_connections=[
+                    channel_connection.ChannelConnection(),
+                ],
+                next_page_token='ghi',
+            ),
+            eventarc.ListChannelConnectionsResponse(
+                channel_connections=[
+                    channel_connection.ChannelConnection(),
+                    channel_connection.ChannelConnection(),
+                ],
+            ),
+            RuntimeError,
+        )
+
+        metadata = ()
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ('parent', ''),
+            )),
+        )
+        pager = client.list_channel_connections(request={})
+
+        assert pager._metadata == metadata
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, channel_connection.ChannelConnection)
+                   for i in results)
+
+
+def test_list_channel_connections_pages(transport_name: str = "grpc"):
+    client = EventarcClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_channel_connections),
+            '__call__') as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            eventarc.ListChannelConnectionsResponse(
+                channel_connections=[
+                    channel_connection.ChannelConnection(),
+                    channel_connection.ChannelConnection(),
+                    channel_connection.ChannelConnection(),
+                ],
+                next_page_token='abc',
+            ),
+            eventarc.ListChannelConnectionsResponse(
+                channel_connections=[],
+                next_page_token='def',
+            ),
+            eventarc.ListChannelConnectionsResponse(
+                channel_connections=[
+                    channel_connection.ChannelConnection(),
+                ],
+                next_page_token='ghi',
+            ),
+            eventarc.ListChannelConnectionsResponse(
+                channel_connections=[
+                    channel_connection.ChannelConnection(),
+                    channel_connection.ChannelConnection(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = list(client.list_channel_connections(request={}).pages)
+        for page_, token in zip(pages, ['abc', 'def', 'ghi', '']):
+            assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.asyncio
+async def test_list_channel_connections_async_pager():
+    client = EventarcAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_channel_connections),
+            '__call__', new_callable=mock.AsyncMock) as call:
+        # Set the response to a series of pages.
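+        # With new_callable=mock.AsyncMock, each side_effect entry is returned
+        # from an awaited call, so the async pager can fetch pages much as it
+        # would from a live transport.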
+        call.side_effect = (
+            eventarc.ListChannelConnectionsResponse(
+                channel_connections=[
+                    channel_connection.ChannelConnection(),
+                    channel_connection.ChannelConnection(),
+                    channel_connection.ChannelConnection(),
+                ],
+                next_page_token='abc',
+            ),
+            eventarc.ListChannelConnectionsResponse(
+                channel_connections=[],
+                next_page_token='def',
+            ),
+            eventarc.ListChannelConnectionsResponse(
+                channel_connections=[
+                    channel_connection.ChannelConnection(),
+                ],
+                next_page_token='ghi',
+            ),
+            eventarc.ListChannelConnectionsResponse(
+                channel_connections=[
+                    channel_connection.ChannelConnection(),
+                    channel_connection.ChannelConnection(),
+                ],
+            ),
+            RuntimeError,
+        )
+        async_pager = await client.list_channel_connections(request={})
+        assert async_pager.next_page_token == 'abc'
+        responses = []
+        async for response in async_pager:  # pragma: no branch
+            responses.append(response)
+
+        assert len(responses) == 6
+        assert all(isinstance(i, channel_connection.ChannelConnection)
+                   for i in responses)
+
+
+@pytest.mark.asyncio
+async def test_list_channel_connections_async_pages():
+    client = EventarcAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_channel_connections),
+            '__call__', new_callable=mock.AsyncMock) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            eventarc.ListChannelConnectionsResponse(
+                channel_connections=[
+                    channel_connection.ChannelConnection(),
+                    channel_connection.ChannelConnection(),
+                    channel_connection.ChannelConnection(),
+                ],
+                next_page_token='abc',
+            ),
+            eventarc.ListChannelConnectionsResponse(
+                channel_connections=[],
+                next_page_token='def',
+            ),
+            eventarc.ListChannelConnectionsResponse(
+                channel_connections=[
+                    channel_connection.ChannelConnection(),
+                ],
+                next_page_token='ghi',
+            ),
+            eventarc.ListChannelConnectionsResponse(
+                channel_connections=[
+                    channel_connection.ChannelConnection(),
+                    channel_connection.ChannelConnection(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = []
+        # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch`
+        # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372
+        async for page_ in (  # pragma: no branch
+            await client.list_channel_connections(request={})
+        ).pages:
+            pages.append(page_)
+        for page_, token in zip(pages, ['abc', 'def', 'ghi', '']):
+            assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.parametrize("request_type", [
+    eventarc.CreateChannelConnectionRequest,
+    dict,
+])
+def test_create_channel_connection(request_type, transport: str = 'grpc'):
+    client = EventarcClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_channel_connection),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name='operations/spam')
+        response = client.create_channel_connection(request)
+
+        # Establish that the underlying gRPC stub method was called.
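+        # create_channel_connection is a long-running operation: the client
+        # should wrap the raw operations_pb2.Operation in an operation future.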
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.CreateChannelConnectionRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_channel_connection_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_channel_connection), + '__call__') as call: + client.create_channel_connection() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.CreateChannelConnectionRequest() + +@pytest.mark.asyncio +async def test_create_channel_connection_async(transport: str = 'grpc_asyncio', request_type=eventarc.CreateChannelConnectionRequest): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_channel_connection), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.create_channel_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.CreateChannelConnectionRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_channel_connection_async_from_dict(): + await test_create_channel_connection_async(request_type=dict) + + +def test_create_channel_connection_field_headers(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = eventarc.CreateChannelConnectionRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_channel_connection), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_channel_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_channel_connection_field_headers_async(): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
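+    # The client mirrors URI-bound request fields into the
+    # 'x-goog-request-params' metadata entry, which is what the
+    # assertions below inspect.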
+    request = eventarc.CreateChannelConnectionRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_channel_connection),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op'))
+        await client.create_channel_connection(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent_value',
+    ) in kw['metadata']
+
+
+def test_create_channel_connection_flattened():
+    client = EventarcClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_channel_connection),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name='operations/op')
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.create_channel_connection(
+            parent='parent_value',
+            channel_connection=gce_channel_connection.ChannelConnection(name='name_value'),
+            channel_connection_id='channel_connection_id_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+        arg = args[0].channel_connection
+        mock_val = gce_channel_connection.ChannelConnection(name='name_value')
+        assert arg == mock_val
+        arg = args[0].channel_connection_id
+        mock_val = 'channel_connection_id_value'
+        assert arg == mock_val
+
+
+def test_create_channel_connection_flattened_error():
+    client = EventarcClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.create_channel_connection(
+            eventarc.CreateChannelConnectionRequest(),
+            parent='parent_value',
+            channel_connection=gce_channel_connection.ChannelConnection(name='name_value'),
+            channel_connection_id='channel_connection_id_value',
+        )
+
+
+@pytest.mark.asyncio
+async def test_create_channel_connection_flattened_async():
+    client = EventarcAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_channel_connection),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name='operations/spam')
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.create_channel_connection(
+            parent='parent_value',
+            channel_connection=gce_channel_connection.ChannelConnection(name='name_value'),
+            channel_connection_id='channel_connection_id_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].channel_connection + mock_val = gce_channel_connection.ChannelConnection(name='name_value') + assert arg == mock_val + arg = args[0].channel_connection_id + mock_val = 'channel_connection_id_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_create_channel_connection_flattened_error_async(): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_channel_connection( + eventarc.CreateChannelConnectionRequest(), + parent='parent_value', + channel_connection=gce_channel_connection.ChannelConnection(name='name_value'), + channel_connection_id='channel_connection_id_value', + ) + + +@pytest.mark.parametrize("request_type", [ + eventarc.DeleteChannelConnectionRequest, + dict, +]) +def test_delete_channel_connection(request_type, transport: str = 'grpc'): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_channel_connection), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.delete_channel_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.DeleteChannelConnectionRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_channel_connection_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_channel_connection), + '__call__') as call: + client.delete_channel_connection() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.DeleteChannelConnectionRequest() + +@pytest.mark.asyncio +async def test_delete_channel_connection_async(transport: str = 'grpc_asyncio', request_type=eventarc.DeleteChannelConnectionRequest): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_channel_connection), + '__call__') as call: + # Designate an appropriate return value for the call. 
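+        # Wrapping the Operation proto in FakeUnaryUnaryCall lets the async
+        # client await the mocked RPC before building the operation future.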
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.delete_channel_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.DeleteChannelConnectionRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_channel_connection_async_from_dict(): + await test_delete_channel_connection_async(request_type=dict) + + +def test_delete_channel_connection_field_headers(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = eventarc.DeleteChannelConnectionRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_channel_connection), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.delete_channel_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_channel_connection_field_headers_async(): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = eventarc.DeleteChannelConnectionRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_channel_connection), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.delete_channel_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_delete_channel_connection_flattened(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_channel_connection), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_channel_connection( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
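+        # The flattened keyword argument should have been copied into the
+        # corresponding field of the request message.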
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+
+def test_delete_channel_connection_flattened_error():
+    client = EventarcClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.delete_channel_connection(
+            eventarc.DeleteChannelConnectionRequest(),
+            name='name_value',
+        )
+
+
+@pytest.mark.asyncio
+async def test_delete_channel_connection_flattened_async():
+    client = EventarcAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_channel_connection),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name='operations/spam')
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.delete_channel_connection(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_delete_channel_connection_flattened_error_async():
+    client = EventarcAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.delete_channel_connection(
+            eventarc.DeleteChannelConnectionRequest(),
+            name='name_value',
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+    eventarc.GetGoogleChannelConfigRequest,
+    dict,
+])
+def test_get_google_channel_config(request_type, transport: str = 'grpc'):
+    client = EventarcClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_google_channel_config),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = google_channel_config.GoogleChannelConfig(
+            name='name_value',
+            crypto_key_name='crypto_key_name_value',
+        )
+        response = client.get_google_channel_config(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == eventarc.GetGoogleChannelConfigRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, google_channel_config.GoogleChannelConfig)
+    assert response.name == 'name_value'
+    assert response.crypto_key_name == 'crypto_key_name_value'
+
+
+def test_get_google_channel_config_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = EventarcClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_google_channel_config),
+            '__call__') as call:
+        client.get_google_channel_config()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == eventarc.GetGoogleChannelConfigRequest()
+
+
+@pytest.mark.asyncio
+async def test_get_google_channel_config_async(transport: str = 'grpc_asyncio', request_type=eventarc.GetGoogleChannelConfigRequest):
+    client = EventarcAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_google_channel_config),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(google_channel_config.GoogleChannelConfig(
+            name='name_value',
+            crypto_key_name='crypto_key_name_value',
+        ))
+        response = await client.get_google_channel_config(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == eventarc.GetGoogleChannelConfigRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, google_channel_config.GoogleChannelConfig)
+    assert response.name == 'name_value'
+    assert response.crypto_key_name == 'crypto_key_name_value'
+
+
+@pytest.mark.asyncio
+async def test_get_google_channel_config_async_from_dict():
+    await test_get_google_channel_config_async(request_type=dict)
+
+
+def test_get_google_channel_config_field_headers():
+    client = EventarcClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = eventarc.GetGoogleChannelConfigRequest()
+
+    request.name = 'name_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_google_channel_config),
+            '__call__') as call:
+        call.return_value = google_channel_config.GoogleChannelConfig()
+        client.get_google_channel_config(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'name=name_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_get_google_channel_config_field_headers_async():
+    client = EventarcAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = eventarc.GetGoogleChannelConfigRequest()
+
+    request.name = 'name_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_google_channel_config),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(google_channel_config.GoogleChannelConfig())
+        await client.get_google_channel_config(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'name=name_value',
+    ) in kw['metadata']
+
+
+def test_get_google_channel_config_flattened():
+    client = EventarcClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_google_channel_config),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = google_channel_config.GoogleChannelConfig()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.get_google_channel_config(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+
+def test_get_google_channel_config_flattened_error():
+    client = EventarcClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.get_google_channel_config(
+            eventarc.GetGoogleChannelConfigRequest(),
+            name='name_value',
+        )
+
+
+@pytest.mark.asyncio
+async def test_get_google_channel_config_flattened_async():
+    client = EventarcAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_google_channel_config),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(google_channel_config.GoogleChannelConfig())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.get_google_channel_config(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_get_google_channel_config_flattened_error_async():
+    client = EventarcAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.get_google_channel_config(
+            eventarc.GetGoogleChannelConfigRequest(),
+            name='name_value',
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+    eventarc.UpdateGoogleChannelConfigRequest,
+    dict,
+])
+def test_update_google_channel_config(request_type, transport: str = 'grpc'):
+    client = EventarcClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_google_channel_config),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = gce_google_channel_config.GoogleChannelConfig(
+            name='name_value',
+            crypto_key_name='crypto_key_name_value',
+        )
+        response = client.update_google_channel_config(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == eventarc.UpdateGoogleChannelConfigRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, gce_google_channel_config.GoogleChannelConfig)
+    assert response.name == 'name_value'
+    assert response.crypto_key_name == 'crypto_key_name_value'
+
+
+def test_update_google_channel_config_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = EventarcClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_google_channel_config),
+            '__call__') as call:
+        client.update_google_channel_config()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == eventarc.UpdateGoogleChannelConfigRequest()
+
+
+@pytest.mark.asyncio
+async def test_update_google_channel_config_async(transport: str = 'grpc_asyncio', request_type=eventarc.UpdateGoogleChannelConfigRequest):
+    client = EventarcAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_google_channel_config),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gce_google_channel_config.GoogleChannelConfig(
+            name='name_value',
+            crypto_key_name='crypto_key_name_value',
+        ))
+        response = await client.update_google_channel_config(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == eventarc.UpdateGoogleChannelConfigRequest()
+
+    # Establish that the response is the type that we expect.
+ assert isinstance(response, gce_google_channel_config.GoogleChannelConfig) + assert response.name == 'name_value' + assert response.crypto_key_name == 'crypto_key_name_value' + + +@pytest.mark.asyncio +async def test_update_google_channel_config_async_from_dict(): + await test_update_google_channel_config_async(request_type=dict) + + +def test_update_google_channel_config_field_headers(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = eventarc.UpdateGoogleChannelConfigRequest() + + request.google_channel_config.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_google_channel_config), + '__call__') as call: + call.return_value = gce_google_channel_config.GoogleChannelConfig() + client.update_google_channel_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'google_channel_config.name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_google_channel_config_field_headers_async(): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = eventarc.UpdateGoogleChannelConfigRequest() + + request.google_channel_config.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_google_channel_config), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gce_google_channel_config.GoogleChannelConfig()) + await client.update_google_channel_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'google_channel_config.name=name_value', + ) in kw['metadata'] + + +def test_update_google_channel_config_flattened(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_google_channel_config), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = gce_google_channel_config.GoogleChannelConfig() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_google_channel_config( + google_channel_config=gce_google_channel_config.GoogleChannelConfig(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
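+        # Both flattened values should land on the request: the config message
+        # itself and the FieldMask naming the fields to update.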
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].google_channel_config
+        mock_val = gce_google_channel_config.GoogleChannelConfig(name='name_value')
+        assert arg == mock_val
+        arg = args[0].update_mask
+        mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
+        assert arg == mock_val
+
+
+def test_update_google_channel_config_flattened_error():
+    client = EventarcClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.update_google_channel_config(
+            eventarc.UpdateGoogleChannelConfigRequest(),
+            google_channel_config=gce_google_channel_config.GoogleChannelConfig(name='name_value'),
+            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+        )
+
+
+@pytest.mark.asyncio
+async def test_update_google_channel_config_flattened_async():
+    client = EventarcAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_google_channel_config),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gce_google_channel_config.GoogleChannelConfig())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.update_google_channel_config(
+            google_channel_config=gce_google_channel_config.GoogleChannelConfig(name='name_value'),
+            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].google_channel_config
+        mock_val = gce_google_channel_config.GoogleChannelConfig(name='name_value')
+        assert arg == mock_val
+        arg = args[0].update_mask
+        mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_update_google_channel_config_flattened_error_async():
+    client = EventarcAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.update_google_channel_config(
+            eventarc.UpdateGoogleChannelConfigRequest(),
+            google_channel_config=gce_google_channel_config.GoogleChannelConfig(name='name_value'),
+            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+    eventarc.GetTriggerRequest,
+    dict,
+])
+def test_get_trigger_rest(request_type):
+    client = EventarcClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'name': 'projects/sample1/locations/sample2/triggers/sample3'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
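+        # The REST transport deserializes JSON bodies, so the fake
+        # requests.Response below carries the proto serialized via
+        # json_format.MessageToJson.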
+ return_value = trigger.Trigger( + name='name_value', + uid='uid_value', + service_account='service_account_value', + channel='channel_value', + etag='etag_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = trigger.Trigger.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_trigger(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, trigger.Trigger) + assert response.name == 'name_value' + assert response.uid == 'uid_value' + assert response.service_account == 'service_account_value' + assert response.channel == 'channel_value' + assert response.etag == 'etag_value' + + +def test_get_trigger_rest_required_fields(request_type=eventarc.GetTriggerRequest): + transport_class = transports.EventarcRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_trigger._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_trigger._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = trigger.Trigger() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
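+            # Normally transcode() matches the request against its http rule,
+            # e.g. (illustrative values) name='projects/p/locations/l/triggers/t'
+            # would map onto the uri 'v1/projects/p/locations/l/triggers/t'
+            # with nothing left over for the query string. Stubbing it with a
+            # bare uri instead routes every remaining field into query_params,
+            # which the expected_params assertion below depends on.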
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "get",
+                'query_params': pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = trigger.Trigger.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.get_trigger(request)
+
+            expected_params = [
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_get_trigger_rest_unset_required_fields():
+    transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.get_trigger._get_unset_required_fields({})
+    assert set(unset_fields) == (set(()) & set(("name", )))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_get_trigger_rest_interceptors(null_interceptor):
+    transport = transports.EventarcRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.EventarcRestInterceptor(),
+    )
+    client = EventarcClient(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(transports.EventarcRestInterceptor, "post_get_trigger") as post, \
+        mock.patch.object(transports.EventarcRestInterceptor, "pre_get_trigger") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = eventarc.GetTriggerRequest.pb(eventarc.GetTriggerRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = trigger.Trigger.to_json(trigger.Trigger())
+
+        request = eventarc.GetTriggerRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = trigger.Trigger()
+
+        client.get_trigger(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_get_trigger_rest_bad_request(transport: str = 'rest', request_type=eventarc.GetTriggerRequest):
+    client = EventarcClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'name': 'projects/sample1/locations/sample2/triggers/sample3'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.get_trigger(request)
+
+
+def test_get_trigger_rest_flattened():
+    client = EventarcClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+ return_value = trigger.Trigger() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/triggers/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = trigger.Trigger.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get_trigger(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/locations/*/triggers/*}" % client.transport._host, args[1]) + + +def test_get_trigger_rest_flattened_error(transport: str = 'rest'): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_trigger( + eventarc.GetTriggerRequest(), + name='name_value', + ) + + +def test_get_trigger_rest_error(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + eventarc.ListTriggersRequest, + dict, +]) +def test_list_triggers_rest(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = eventarc.ListTriggersResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = eventarc.ListTriggersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list_triggers(request) + + # Establish that the response is the type that we expect. 
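+    # The raw ListTriggersResponse is wrapped in a ListTriggersPager; iterating
+    # it re-issues the mocked HTTP request for each subsequent page.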
+ assert isinstance(response, pagers.ListTriggersPager) + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable == ['unreachable_value'] + + +def test_list_triggers_rest_required_fields(request_type=eventarc.ListTriggersRequest): + transport_class = transports.EventarcRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_triggers._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_triggers._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "order_by", "page_size", "page_token", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = eventarc.ListTriggersResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "get",
+                'query_params': pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = eventarc.ListTriggersResponse.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.list_triggers(request)
+
+            expected_params = [
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_list_triggers_rest_unset_required_fields():
+    transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.list_triggers._get_unset_required_fields({})
+    assert set(unset_fields) == (set(("filter", "orderBy", "pageSize", "pageToken", )) & set(("parent", )))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_list_triggers_rest_interceptors(null_interceptor):
+    transport = transports.EventarcRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.EventarcRestInterceptor(),
+    )
+    client = EventarcClient(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(transports.EventarcRestInterceptor, "post_list_triggers") as post, \
+        mock.patch.object(transports.EventarcRestInterceptor, "pre_list_triggers") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = eventarc.ListTriggersRequest.pb(eventarc.ListTriggersRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = eventarc.ListTriggersResponse.to_json(eventarc.ListTriggersResponse())
+
+        request = eventarc.ListTriggersRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = eventarc.ListTriggersResponse()
+
+        client.list_triggers(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_list_triggers_rest_bad_request(transport: str = 'rest', request_type=eventarc.ListTriggersRequest):
+    client = EventarcClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'parent': 'projects/sample1/locations/sample2'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.list_triggers(request)
+
+
+def test_list_triggers_rest_flattened():
+    client = EventarcClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = eventarc.ListTriggersResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/locations/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = eventarc.ListTriggersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list_triggers(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/triggers" % client.transport._host, args[1]) + + +def test_list_triggers_rest_flattened_error(transport: str = 'rest'): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_triggers( + eventarc.ListTriggersRequest(), + parent='parent_value', + ) + + +def test_list_triggers_rest_pager(transport: str = 'rest'): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
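+        # The canned responses below are consumed in order through
+        # req.side_effect, so each page the pager fetches costs exactly one
+        # mocked HTTP round trip.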
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + eventarc.ListTriggersResponse( + triggers=[ + trigger.Trigger(), + trigger.Trigger(), + trigger.Trigger(), + ], + next_page_token='abc', + ), + eventarc.ListTriggersResponse( + triggers=[], + next_page_token='def', + ), + eventarc.ListTriggersResponse( + triggers=[ + trigger.Trigger(), + ], + next_page_token='ghi', + ), + eventarc.ListTriggersResponse( + triggers=[ + trigger.Trigger(), + trigger.Trigger(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(eventarc.ListTriggersResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'parent': 'projects/sample1/locations/sample2'} + + pager = client.list_triggers(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, trigger.Trigger) + for i in results) + + pages = list(client.list_triggers(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + eventarc.CreateTriggerRequest, + dict, +]) +def test_create_trigger_rest(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init["trigger"] = {'name': 'name_value', 'uid': 'uid_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'event_filters': [{'attribute': 'attribute_value', 'value': 'value_value', 'operator': 'operator_value'}], 'service_account': 'service_account_value', 'destination': {'cloud_run': {'service': 'service_value', 'path': 'path_value', 'region': 'region_value'}, 'cloud_function': 'cloud_function_value', 'gke': {'cluster': 'cluster_value', 'location': 'location_value', 'namespace': 'namespace_value', 'service': 'service_value', 'path': 'path_value'}, 'workflow': 'workflow_value'}, 'transport': {'pubsub': {'topic': 'topic_value', 'subscription': 'subscription_value'}}, 'labels': {}, 'channel': 'channel_value', 'conditions': {}, 'etag': 'etag_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.create_trigger(request) + + # Establish that the response is the type that we expect. 
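+    # CreateTrigger is a long-running operation: the transport returns a raw
+    # operations_pb2.Operation, which the client wraps so the underlying
+    # proto is reachable as response.operation.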
+ assert response.operation.name == "operations/spam" + + +def test_create_trigger_rest_required_fields(request_type=eventarc.CreateTriggerRequest): + transport_class = transports.EventarcRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["trigger_id"] = "" + request_init["validate_only"] = False + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + assert "triggerId" not in jsonified_request + assert "validateOnly" not in jsonified_request + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_trigger._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "triggerId" in jsonified_request + assert jsonified_request["triggerId"] == request_init["trigger_id"] + assert "validateOnly" in jsonified_request + assert jsonified_request["validateOnly"] == request_init["validate_only"] + + jsonified_request["parent"] = 'parent_value' + jsonified_request["triggerId"] = 'trigger_id_value' + jsonified_request["validateOnly"] = True + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_trigger._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("trigger_id", "validate_only", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + assert "triggerId" in jsonified_request + assert jsonified_request["triggerId"] == 'trigger_id_value' + assert "validateOnly" in jsonified_request + assert jsonified_request["validateOnly"] == True + + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
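+            # Required fields that map to the query string (triggerId,
+            # validateOnly) are sent even when left at their defaults, which
+            # is why the expected_params below contain an empty trigger id
+            # and a lowercase 'false'.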
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "post",
+                'query_params': pb_request,
+            }
+            transcode_result['body'] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+            json_return_value = json_format.MessageToJson(return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.create_trigger(request)
+
+            expected_params = [
+                (
+                    "triggerId",
+                    "",
+                ),
+                (
+                    "validateOnly",
+                    str(False).lower(),
+                ),
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_create_trigger_rest_unset_required_fields():
+    transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.create_trigger._get_unset_required_fields({})
+    assert set(unset_fields) == (set(("triggerId", "validateOnly", )) & set(("parent", "trigger", "triggerId", "validateOnly", )))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_create_trigger_rest_interceptors(null_interceptor):
+    transport = transports.EventarcRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.EventarcRestInterceptor(),
+    )
+    client = EventarcClient(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(operation.Operation, "_set_result_from_operation"), \
+        mock.patch.object(transports.EventarcRestInterceptor, "post_create_trigger") as post, \
+        mock.patch.object(transports.EventarcRestInterceptor, "pre_create_trigger") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = eventarc.CreateTriggerRequest.pb(eventarc.CreateTriggerRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = json_format.MessageToJson(operations_pb2.Operation())
+
+        request = eventarc.CreateTriggerRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = operations_pb2.Operation()
+
+        client.create_trigger(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_create_trigger_rest_bad_request(transport: str = 'rest', request_type=eventarc.CreateTriggerRequest):
+    client = EventarcClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'parent': 'projects/sample1/locations/sample2'}
+    request_init["trigger"] = {'name': 'name_value', 'uid': 'uid_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'event_filters': [{'attribute': 'attribute_value', 'value': 'value_value', 'operator': 'operator_value'}], 'service_account': 'service_account_value', 'destination': {'cloud_run': {'service': 'service_value', 'path': 'path_value', 'region': 'region_value'}, 'cloud_function': 'cloud_function_value', 'gke': {'cluster': 'cluster_value', 'location': 'location_value', 'namespace': 'namespace_value', 'service': 'service_value', 'path': 'path_value'}, 'workflow': 'workflow_value'}, 'transport': {'pubsub': {'topic': 'topic_value', 'subscription': 'subscription_value'}}, 'labels': {}, 'channel': 'channel_value', 'conditions': {}, 'etag': 'etag_value'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.create_trigger(request)
+
+
+def test_create_trigger_rest_flattened():
+    client = EventarcClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = operations_pb2.Operation(name='operations/spam')
+
+        # get arguments that satisfy an http rule for this method
+        sample_request = {'parent': 'projects/sample1/locations/sample2'}
+
+        # get truthy value for each flattened field
+        mock_args = dict(
+            parent='parent_value',
+            trigger=gce_trigger.Trigger(name='name_value'),
+            trigger_id='trigger_id_value',
+        )
+        mock_args.update(sample_request)
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value._content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+
+        client.create_trigger(**mock_args)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(req.mock_calls) == 1
+        _, args, _ = req.mock_calls[0]
+        assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/triggers" % client.transport._host, args[1])
+
+
+def test_create_trigger_rest_flattened_error(transport: str = 'rest'):
+    client = EventarcClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+ with pytest.raises(ValueError): + client.create_trigger( + eventarc.CreateTriggerRequest(), + parent='parent_value', + trigger=gce_trigger.Trigger(name='name_value'), + trigger_id='trigger_id_value', + ) + + +def test_create_trigger_rest_error(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + eventarc.UpdateTriggerRequest, + dict, +]) +def test_update_trigger_rest(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'trigger': {'name': 'projects/sample1/locations/sample2/triggers/sample3'}} + request_init["trigger"] = {'name': 'projects/sample1/locations/sample2/triggers/sample3', 'uid': 'uid_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'event_filters': [{'attribute': 'attribute_value', 'value': 'value_value', 'operator': 'operator_value'}], 'service_account': 'service_account_value', 'destination': {'cloud_run': {'service': 'service_value', 'path': 'path_value', 'region': 'region_value'}, 'cloud_function': 'cloud_function_value', 'gke': {'cluster': 'cluster_value', 'location': 'location_value', 'namespace': 'namespace_value', 'service': 'service_value', 'path': 'path_value'}, 'workflow': 'workflow_value'}, 'transport': {'pubsub': {'topic': 'topic_value', 'subscription': 'subscription_value'}}, 'labels': {}, 'channel': 'channel_value', 'conditions': {}, 'etag': 'etag_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.update_trigger(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_update_trigger_rest_required_fields(request_type=eventarc.UpdateTriggerRequest): + transport_class = transports.EventarcRestTransport + + request_init = {} + request_init["validate_only"] = False + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + assert "validateOnly" not in jsonified_request + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_trigger._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "validateOnly" in jsonified_request + assert jsonified_request["validateOnly"] == request_init["validate_only"] + + jsonified_request["validateOnly"] = True + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_trigger._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
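+    # The set difference is empty only if every reported unset field is a
+    # legitimate query-string parameter; a path or body field leaking into
+    # the result would make this assertion fail.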
+    assert not set(unset_fields) - set(("allow_missing", "update_mask", "validate_only", ))
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with non-default values are left alone
+    assert "validateOnly" in jsonified_request
+    assert jsonified_request["validateOnly"] == True
+
+    client = EventarcClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='rest',
+    )
+    request = request_type(**request_init)
+
+    # Designate an appropriate value for the returned response.
+    return_value = operations_pb2.Operation(name='operations/spam')
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, 'request') as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, 'transcode') as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "patch",
+                'query_params': pb_request,
+            }
+            transcode_result['body'] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+            json_return_value = json_format.MessageToJson(return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.update_trigger(request)
+
+            expected_params = [
+                (
+                    "validateOnly",
+                    str(False).lower(),
+                ),
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_update_trigger_rest_unset_required_fields():
+    transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.update_trigger._get_unset_required_fields({})
+    assert set(unset_fields) == (set(("allowMissing", "updateMask", "validateOnly", )) & set(("validateOnly", )))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_update_trigger_rest_interceptors(null_interceptor):
+    transport = transports.EventarcRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.EventarcRestInterceptor(),
+    )
+    client = EventarcClient(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(operation.Operation, "_set_result_from_operation"), \
+        mock.patch.object(transports.EventarcRestInterceptor, "post_update_trigger") as post, \
+        mock.patch.object(transports.EventarcRestInterceptor, "pre_update_trigger") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = eventarc.UpdateTriggerRequest.pb(eventarc.UpdateTriggerRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = json_format.MessageToJson(operations_pb2.Operation())
+
+        request = eventarc.UpdateTriggerRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = operations_pb2.Operation()
+
+        client.update_trigger(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_update_trigger_rest_bad_request(transport: str = 'rest', request_type=eventarc.UpdateTriggerRequest):
+    client = EventarcClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'trigger': {'name': 'projects/sample1/locations/sample2/triggers/sample3'}}
+    request_init["trigger"] = {'name': 'projects/sample1/locations/sample2/triggers/sample3', 'uid': 'uid_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'event_filters': [{'attribute': 'attribute_value', 'value': 'value_value', 'operator': 'operator_value'}], 'service_account': 'service_account_value', 'destination': {'cloud_run': {'service': 'service_value', 'path': 'path_value', 'region': 'region_value'}, 'cloud_function': 'cloud_function_value', 'gke': {'cluster': 'cluster_value', 'location': 'location_value', 'namespace': 'namespace_value', 'service': 'service_value', 'path': 'path_value'}, 'workflow': 'workflow_value'}, 'transport': {'pubsub': {'topic': 'topic_value', 'subscription': 'subscription_value'}}, 'labels': {}, 'channel': 'channel_value', 'conditions': {}, 'etag': 'etag_value'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.update_trigger(request)
+
+
+def test_update_trigger_rest_flattened():
+    client = EventarcClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = operations_pb2.Operation(name='operations/spam')
+
+        # get arguments that satisfy an http rule for this method
+        sample_request = {'trigger': {'name': 'projects/sample1/locations/sample2/triggers/sample3'}}
+
+        # get truthy value for each flattened field
+        mock_args = dict(
+            trigger=gce_trigger.Trigger(name='name_value'),
+            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+            allow_missing=True,
+        )
+        mock_args.update(sample_request)
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value._content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+
+        client.update_trigger(**mock_args)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(req.mock_calls) == 1
+        _, args, _ = req.mock_calls[0]
+        assert path_template.validate("%s/v1/{trigger.name=projects/*/locations/*/triggers/*}" % client.transport._host, args[1])
+
+
+def test_update_trigger_rest_flattened_error(transport: str = 'rest'):
+    client = EventarcClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+ with pytest.raises(ValueError): + client.update_trigger( + eventarc.UpdateTriggerRequest(), + trigger=gce_trigger.Trigger(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + allow_missing=True, + ) + + +def test_update_trigger_rest_error(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + eventarc.DeleteTriggerRequest, + dict, +]) +def test_delete_trigger_rest(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/triggers/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete_trigger(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_delete_trigger_rest_required_fields(request_type=eventarc.DeleteTriggerRequest): + transport_class = transports.EventarcRestTransport + + request_init = {} + request_init["name"] = "" + request_init["validate_only"] = False + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + assert "validateOnly" not in jsonified_request + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_trigger._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "validateOnly" in jsonified_request + assert jsonified_request["validateOnly"] == request_init["validate_only"] + + jsonified_request["name"] = 'name_value' + jsonified_request["validateOnly"] = True + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_trigger._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("allow_missing", "etag", "validate_only", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + assert "validateOnly" in jsonified_request + assert jsonified_request["validateOnly"] == True + + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. 
+    with mock.patch.object(Session, 'request') as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, 'transcode') as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "delete",
+                'query_params': pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+            json_return_value = json_format.MessageToJson(return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.delete_trigger(request)
+
+            expected_params = [
+                (
+                    "validateOnly",
+                    str(False).lower(),
+                ),
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_delete_trigger_rest_unset_required_fields():
+    transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.delete_trigger._get_unset_required_fields({})
+    assert set(unset_fields) == (set(("allowMissing", "etag", "validateOnly", )) & set(("name", "validateOnly", )))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_delete_trigger_rest_interceptors(null_interceptor):
+    transport = transports.EventarcRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.EventarcRestInterceptor(),
+    )
+    client = EventarcClient(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(operation.Operation, "_set_result_from_operation"), \
+        mock.patch.object(transports.EventarcRestInterceptor, "post_delete_trigger") as post, \
+        mock.patch.object(transports.EventarcRestInterceptor, "pre_delete_trigger") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = eventarc.DeleteTriggerRequest.pb(eventarc.DeleteTriggerRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = json_format.MessageToJson(operations_pb2.Operation())
+
+        request = eventarc.DeleteTriggerRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = operations_pb2.Operation()
+
+        client.delete_trigger(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_delete_trigger_rest_bad_request(transport: str = 'rest', request_type=eventarc.DeleteTriggerRequest):
+    client = EventarcClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'name': 'projects/sample1/locations/sample2/triggers/sample3'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
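+    # google.api_core translates the mocked 400 status into
+    # core_exceptions.BadRequest, which pytest.raises captures below.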
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_trigger(request) + + +def test_delete_trigger_rest_flattened(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/triggers/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + allow_missing=True, + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_trigger(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/locations/*/triggers/*}" % client.transport._host, args[1]) + + +def test_delete_trigger_rest_flattened_error(transport: str = 'rest'): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_trigger( + eventarc.DeleteTriggerRequest(), + name='name_value', + allow_missing=True, + ) + + +def test_delete_trigger_rest_error(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + eventarc.GetChannelRequest, + dict, +]) +def test_get_channel_rest(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/channels/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = channel.Channel( + name='name_value', + uid='uid_value', + provider='provider_value', + state=channel.Channel.State.PENDING, + activation_token='activation_token_value', + crypto_key_name='crypto_key_name_value', + pubsub_topic='pubsub_topic_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = channel.Channel.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_channel(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, channel.Channel) + assert response.name == 'name_value' + assert response.uid == 'uid_value' + assert response.provider == 'provider_value' + assert response.state == channel.Channel.State.PENDING + assert response.activation_token == 'activation_token_value' + assert response.crypto_key_name == 'crypto_key_name_value' + + +def test_get_channel_rest_required_fields(request_type=eventarc.GetChannelRequest): + transport_class = transports.EventarcRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_channel._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_channel._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = channel.Channel() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "get",
+                'query_params': pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = channel.Channel.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.get_channel(request)
+
+            expected_params = [
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_get_channel_rest_unset_required_fields():
+    transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.get_channel._get_unset_required_fields({})
+    assert set(unset_fields) == (set(()) & set(("name", )))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_get_channel_rest_interceptors(null_interceptor):
+    transport = transports.EventarcRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.EventarcRestInterceptor(),
+    )
+    client = EventarcClient(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(transports.EventarcRestInterceptor, "post_get_channel") as post, \
+        mock.patch.object(transports.EventarcRestInterceptor, "pre_get_channel") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = eventarc.GetChannelRequest.pb(eventarc.GetChannelRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = channel.Channel.to_json(channel.Channel())
+
+        request = eventarc.GetChannelRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = channel.Channel()
+
+        client.get_channel(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_get_channel_rest_bad_request(transport: str = 'rest', request_type=eventarc.GetChannelRequest):
+    client = EventarcClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'name': 'projects/sample1/locations/sample2/channels/sample3'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.get_channel(request)
+
+
+def test_get_channel_rest_flattened():
+    client = EventarcClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+ return_value = channel.Channel() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/channels/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = channel.Channel.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get_channel(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/locations/*/channels/*}" % client.transport._host, args[1]) + + +def test_get_channel_rest_flattened_error(transport: str = 'rest'): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_channel( + eventarc.GetChannelRequest(), + name='name_value', + ) + + +def test_get_channel_rest_error(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + eventarc.ListChannelsRequest, + dict, +]) +def test_list_channels_rest(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = eventarc.ListChannelsResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = eventarc.ListChannelsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list_channels(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListChannelsPager) + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable == ['unreachable_value'] + + +def test_list_channels_rest_required_fields(request_type=eventarc.ListChannelsRequest): + transport_class = transports.EventarcRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_channels._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_channels._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("order_by", "page_size", "page_token", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = eventarc.ListChannelsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
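+ # The 'v1/sample_method' uri below is only a placeholder: the session is
+ # mocked, so no request ever leaves the process.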
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = eventarc.ListChannelsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list_channels(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_channels_rest_unset_required_fields(): + transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list_channels._get_unset_required_fields({}) + assert set(unset_fields) == (set(("orderBy", "pageSize", "pageToken", )) & set(("parent", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_channels_rest_interceptors(null_interceptor): + transport = transports.EventarcRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), + ) + client = EventarcClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.EventarcRestInterceptor, "post_list_channels") as post, \ + mock.patch.object(transports.EventarcRestInterceptor, "pre_list_channels") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = eventarc.ListChannelsRequest.pb(eventarc.ListChannelsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = eventarc.ListChannelsResponse.to_json(eventarc.ListChannelsResponse()) + + request = eventarc.ListChannelsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = eventarc.ListChannelsResponse() + + client.list_channels(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_channels_rest_bad_request(transport: str = 'rest', request_type=eventarc.ListChannelsRequest): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_channels(request) + + +def test_list_channels_rest_flattened(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req:
+ # Designate an appropriate value for the returned response.
+ return_value = eventarc.ListChannelsResponse()
+
+ # get arguments that satisfy an http rule for this method
+ sample_request = {'parent': 'projects/sample1/locations/sample2'}
+
+ # get truthy value for each flattened field
+ mock_args = dict(
+ parent='parent_value',
+ )
+ mock_args.update(sample_request)
+
+ # Wrap the value into a proper Response obj
+ response_value = Response()
+ response_value.status_code = 200
+ pb_return_value = eventarc.ListChannelsResponse.pb(return_value)
+ json_return_value = json_format.MessageToJson(pb_return_value)
+ response_value._content = json_return_value.encode('UTF-8')
+ req.return_value = response_value
+
+ client.list_channels(**mock_args)
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(req.mock_calls) == 1
+ _, args, _ = req.mock_calls[0]
+ assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/channels" % client.transport._host, args[1])
+
+
+def test_list_channels_rest_flattened_error(transport: str = 'rest'):
+ client = EventarcClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.list_channels(
+ eventarc.ListChannelsRequest(),
+ parent='parent_value',
+ )
+
+
+def test_list_channels_rest_pager(transport: str = 'rest'):
+ client = EventarcClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(Session, 'request') as req:
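+ # Each faked HTTP response below stands in for one page; the pager should
+ # issue one GET per page and stitch the channels together transparently.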
+ # Set the response as a series of pages
+ response = (
+ eventarc.ListChannelsResponse(
+ channels=[
+ channel.Channel(),
+ channel.Channel(),
+ channel.Channel(),
+ ],
+ next_page_token='abc',
+ ),
+ eventarc.ListChannelsResponse(
+ channels=[],
+ next_page_token='def',
+ ),
+ eventarc.ListChannelsResponse(
+ channels=[
+ channel.Channel(),
+ ],
+ next_page_token='ghi',
+ ),
+ eventarc.ListChannelsResponse(
+ channels=[
+ channel.Channel(),
+ channel.Channel(),
+ ],
+ ),
+ )
+ # Two responses for two calls
+ response = response + response
+
+ # Wrap the values into proper Response objs
+ response = tuple(eventarc.ListChannelsResponse.to_json(x) for x in response)
+ return_values = tuple(Response() for i in response)
+ for return_val, response_val in zip(return_values, response):
+ return_val._content = response_val.encode('UTF-8')
+ return_val.status_code = 200
+ req.side_effect = return_values
+
+ sample_request = {'parent': 'projects/sample1/locations/sample2'}
+
+ pager = client.list_channels(request=sample_request)
+
+ results = list(pager)
+ assert len(results) == 6
+ assert all(isinstance(i, channel.Channel)
+ for i in results)
+
+ pages = list(client.list_channels(request=sample_request).pages)
+ for page_, token in zip(pages, ['abc','def','ghi', '']):
+ assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.parametrize("request_type", [
+ eventarc.CreateChannelRequest,
+ dict,
+])
+def test_create_channel_rest(request_type):
+ client = EventarcClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
+ )
+
+ # send a request that will satisfy transcoding
+ request_init = {'parent': 'projects/sample1/locations/sample2'}
+ request_init["channel"] = {'name': 'name_value', 'uid': 'uid_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'provider': 'provider_value', 'pubsub_topic': 'pubsub_topic_value', 'state': 1, 'activation_token': 'activation_token_value', 'crypto_key_name': 'crypto_key_name_value'}
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(type(client.transport._session), 'request') as req:
+ # Designate an appropriate value for the returned response.
+ return_value = operations_pb2.Operation(name='operations/spam')
+
+ # Wrap the value into a proper Response obj
+ response_value = Response()
+ response_value.status_code = 200
+ json_return_value = json_format.MessageToJson(return_value)
+
+ response_value._content = json_return_value.encode('UTF-8')
+ req.return_value = response_value
+ response = client.create_channel(request)
+
+ # Establish that the response is the type that we expect.
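+ # create_channel returns an operation future; response.operation below is
+ # the underlying long-running-operation proto, hence the name check.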
+ assert response.operation.name == "operations/spam" + + +def test_create_channel_rest_required_fields(request_type=eventarc.CreateChannelRequest): + transport_class = transports.EventarcRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["channel_id"] = "" + request_init["validate_only"] = False + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + assert "channelId" not in jsonified_request + assert "validateOnly" not in jsonified_request + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_channel_._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "channelId" in jsonified_request + assert jsonified_request["channelId"] == request_init["channel_id"] + assert "validateOnly" in jsonified_request + assert jsonified_request["validateOnly"] == request_init["validate_only"] + + jsonified_request["parent"] = 'parent_value' + jsonified_request["channelId"] = 'channel_id_value' + jsonified_request["validateOnly"] = True + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_channel_._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("channel_id", "validate_only", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + assert "channelId" in jsonified_request + assert jsonified_request["channelId"] == 'channel_id_value' + assert "validateOnly" in jsonified_request + assert jsonified_request["validateOnly"] == True + + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
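+ # Unlike the GET cases above, this POST carries a body, so the faked
+ # transcode result below includes one as well.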
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.create_channel(request) + + expected_params = [ + ( + "channelId", + "", + ), + ( + "validateOnly", + str(False).lower(), + ), + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_create_channel_rest_unset_required_fields(): + transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.create_channel_._get_unset_required_fields({}) + assert set(unset_fields) == (set(("channelId", "validateOnly", )) & set(("parent", "channel", "channelId", "validateOnly", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_channel_rest_interceptors(null_interceptor): + transport = transports.EventarcRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), + ) + client = EventarcClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.EventarcRestInterceptor, "post_create_channel") as post, \ + mock.patch.object(transports.EventarcRestInterceptor, "pre_create_channel") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = eventarc.CreateChannelRequest.pb(eventarc.CreateChannelRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) + + request = eventarc.CreateChannelRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_channel(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_channel_rest_bad_request(transport: str = 'rest', request_type=eventarc.CreateChannelRequest): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init["channel"] = {'name': 'name_value', 'uid': 'uid_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'provider': 'provider_value', 'pubsub_topic': 'pubsub_topic_value', 'state': 1, 'activation_token': 'activation_token_value', 'crypto_key_name': 'crypto_key_name_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
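+ # A 400 status is translated into core_exceptions.BadRequest by the shared
+ # google.api_core error mapping, which is what the test expects.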
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_channel(request) + + +def test_create_channel_rest_flattened(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/locations/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + channel=gce_channel.Channel(name='name_value'), + channel_id='channel_id_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.create_channel(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/channels" % client.transport._host, args[1]) + + +def test_create_channel_rest_flattened_error(transport: str = 'rest'): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_channel( + eventarc.CreateChannelRequest(), + parent='parent_value', + channel=gce_channel.Channel(name='name_value'), + channel_id='channel_id_value', + ) + + +def test_create_channel_rest_error(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + eventarc.UpdateChannelRequest, + dict, +]) +def test_update_channel_rest(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'channel': {'name': 'projects/sample1/locations/sample2/channels/sample3'}} + request_init["channel"] = {'name': 'projects/sample1/locations/sample2/channels/sample3', 'uid': 'uid_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'provider': 'provider_value', 'pubsub_topic': 'pubsub_topic_value', 'state': 1, 'activation_token': 'activation_token_value', 'crypto_key_name': 'crypto_key_name_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
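+ # update_channel is also a long-running operation, so a bare Operation with
+ # just a name satisfies the type assertion that follows.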
+ return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.update_channel(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_update_channel_rest_required_fields(request_type=eventarc.UpdateChannelRequest): + transport_class = transports.EventarcRestTransport + + request_init = {} + request_init["validate_only"] = False + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + assert "validateOnly" not in jsonified_request + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_channel._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "validateOnly" in jsonified_request + assert jsonified_request["validateOnly"] == request_init["validate_only"] + + jsonified_request["validateOnly"] = True + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_channel._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask", "validate_only", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "validateOnly" in jsonified_request + assert jsonified_request["validateOnly"] == True + + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
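+ # MessageToJson drops the default False validate_only, so the transport has
+ # to re-add it; the expected_params check below confirms that it does.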
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.update_channel(request) + + expected_params = [ + ( + "validateOnly", + str(False).lower(), + ), + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_update_channel_rest_unset_required_fields(): + transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.update_channel._get_unset_required_fields({}) + assert set(unset_fields) == (set(("updateMask", "validateOnly", )) & set(("validateOnly", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_channel_rest_interceptors(null_interceptor): + transport = transports.EventarcRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), + ) + client = EventarcClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.EventarcRestInterceptor, "post_update_channel") as post, \ + mock.patch.object(transports.EventarcRestInterceptor, "pre_update_channel") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = eventarc.UpdateChannelRequest.pb(eventarc.UpdateChannelRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) + + request = eventarc.UpdateChannelRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.update_channel(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_channel_rest_bad_request(transport: str = 'rest', request_type=eventarc.UpdateChannelRequest): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'channel': {'name': 'projects/sample1/locations/sample2/channels/sample3'}} + request_init["channel"] = {'name': 'projects/sample1/locations/sample2/channels/sample3', 'uid': 'uid_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'provider': 'provider_value', 'pubsub_topic': 'pubsub_topic_value', 'state': 1, 'activation_token': 'activation_token_value', 'crypto_key_name': 'crypto_key_name_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_channel(request) + + +def test_update_channel_rest_flattened(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'channel': {'name': 'projects/sample1/locations/sample2/channels/sample3'}} + + # get truthy value for each flattened field + mock_args = dict( + channel=gce_channel.Channel(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.update_channel(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{channel.name=projects/*/locations/*/channels/*}" % client.transport._host, args[1]) + + +def test_update_channel_rest_flattened_error(transport: str = 'rest'): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_channel( + eventarc.UpdateChannelRequest(), + channel=gce_channel.Channel(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +def test_update_channel_rest_error(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + eventarc.DeleteChannelRequest, + dict, +]) +def test_delete_channel_rest(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/channels/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete_channel(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_delete_channel_rest_required_fields(request_type=eventarc.DeleteChannelRequest): + transport_class = transports.EventarcRestTransport + + request_init = {} + request_init["name"] = "" + request_init["validate_only"] = False + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + assert "validateOnly" not in jsonified_request + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_channel._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "validateOnly" in jsonified_request + assert jsonified_request["validateOnly"] == request_init["validate_only"] + + jsonified_request["name"] = 'name_value' + jsonified_request["validateOnly"] = True + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_channel._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("validate_only", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + assert "validateOnly" in jsonified_request + assert jsonified_request["validateOnly"] == True + + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
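+ # A DELETE carries no body, so every surviving request field has to travel
+ # in query_params; only validateOnly is expected below.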
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_channel(request) + + expected_params = [ + ( + "validateOnly", + str(False).lower(), + ), + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_channel_rest_unset_required_fields(): + transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete_channel._get_unset_required_fields({}) + assert set(unset_fields) == (set(("validateOnly", )) & set(("name", "validateOnly", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_channel_rest_interceptors(null_interceptor): + transport = transports.EventarcRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), + ) + client = EventarcClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.EventarcRestInterceptor, "post_delete_channel") as post, \ + mock.patch.object(transports.EventarcRestInterceptor, "pre_delete_channel") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = eventarc.DeleteChannelRequest.pb(eventarc.DeleteChannelRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) + + request = eventarc.DeleteChannelRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.delete_channel(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_channel_rest_bad_request(transport: str = 'rest', request_type=eventarc.DeleteChannelRequest): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/channels/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_channel(request) + + +def test_delete_channel_rest_flattened(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
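+ # The flattened call should expand name into the full resource path, e.g.
+ # (sample values) v1/projects/sample1/locations/sample2/channels/sample3.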
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/channels/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_channel(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/locations/*/channels/*}" % client.transport._host, args[1]) + + +def test_delete_channel_rest_flattened_error(transport: str = 'rest'): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_channel( + eventarc.DeleteChannelRequest(), + name='name_value', + ) + + +def test_delete_channel_rest_error(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + eventarc.GetProviderRequest, + dict, +]) +def test_get_provider_rest(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/providers/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = discovery.Provider( + name='name_value', + display_name='display_name_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = discovery.Provider.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_provider(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, discovery.Provider) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + + +def test_get_provider_rest_required_fields(request_type=eventarc.GetProviderRequest): + transport_class = transports.EventarcRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_provider._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_provider._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = discovery.Provider() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = discovery.Provider.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get_provider(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_provider_rest_unset_required_fields(): + transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_provider._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_provider_rest_interceptors(null_interceptor): + transport = transports.EventarcRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), + ) + client = EventarcClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.EventarcRestInterceptor, "post_get_provider") as post, \ + mock.patch.object(transports.EventarcRestInterceptor, "pre_get_provider") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = eventarc.GetProviderRequest.pb(eventarc.GetProviderRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = discovery.Provider.to_json(discovery.Provider()) + + request = eventarc.GetProviderRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = discovery.Provider() + + client.get_provider(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_provider_rest_bad_request(transport: str = 'rest', request_type=eventarc.GetProviderRequest): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/providers/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_provider(request) + + +def test_get_provider_rest_flattened(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = discovery.Provider() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/providers/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = discovery.Provider.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get_provider(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/locations/*/providers/*}" % client.transport._host, args[1]) + + +def test_get_provider_rest_flattened_error(transport: str = 'rest'): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_provider( + eventarc.GetProviderRequest(), + name='name_value', + ) + + +def test_get_provider_rest_error(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + eventarc.ListProvidersRequest, + dict, +]) +def test_list_providers_rest(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = eventarc.ListProvidersResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = eventarc.ListProvidersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list_providers(request) + + # Establish that the response is the type that we expect. 
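+ # unreachable lists locations the service could not query, following the
+ # standard pattern for List responses (AIP-217).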
+ assert isinstance(response, pagers.ListProvidersPager) + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable == ['unreachable_value'] + + +def test_list_providers_rest_required_fields(request_type=eventarc.ListProvidersRequest): + transport_class = transports.EventarcRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_providers._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_providers._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "order_by", "page_size", "page_token", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = eventarc.ListProvidersResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = eventarc.ListProvidersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list_providers(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_providers_rest_unset_required_fields(): + transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list_providers._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "orderBy", "pageSize", "pageToken", )) & set(("parent", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_providers_rest_interceptors(null_interceptor): + transport = transports.EventarcRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), + ) + client = EventarcClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.EventarcRestInterceptor, "post_list_providers") as post, \ + mock.patch.object(transports.EventarcRestInterceptor, "pre_list_providers") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = eventarc.ListProvidersRequest.pb(eventarc.ListProvidersRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = eventarc.ListProvidersResponse.to_json(eventarc.ListProvidersResponse()) + + request = eventarc.ListProvidersRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = eventarc.ListProvidersResponse() + + client.list_providers(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_providers_rest_bad_request(transport: str = 'rest', request_type=eventarc.ListProvidersRequest): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_providers(request) + + +def test_list_providers_rest_flattened(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req:
+ # Designate an appropriate value for the returned response.
+ return_value = eventarc.ListProvidersResponse()
+
+ # get arguments that satisfy an http rule for this method
+ sample_request = {'parent': 'projects/sample1/locations/sample2'}
+
+ # get truthy value for each flattened field
+ mock_args = dict(
+ parent='parent_value',
+ )
+ mock_args.update(sample_request)
+
+ # Wrap the value into a proper Response obj
+ response_value = Response()
+ response_value.status_code = 200
+ pb_return_value = eventarc.ListProvidersResponse.pb(return_value)
+ json_return_value = json_format.MessageToJson(pb_return_value)
+ response_value._content = json_return_value.encode('UTF-8')
+ req.return_value = response_value
+
+ client.list_providers(**mock_args)
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(req.mock_calls) == 1
+ _, args, _ = req.mock_calls[0]
+ assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/providers" % client.transport._host, args[1])
+
+
+def test_list_providers_rest_flattened_error(transport: str = 'rest'):
+ client = EventarcClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.list_providers(
+ eventarc.ListProvidersRequest(),
+ parent='parent_value',
+ )
+
+
+def test_list_providers_rest_pager(transport: str = 'rest'):
+ client = EventarcClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(Session, 'request') as req:
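+ # Same scheme as the channels pager above: four pages are consumed twice,
+ # once via list(pager) and once via the .pages iterator.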
+ # Set the response as a series of pages
+ response = (
+ eventarc.ListProvidersResponse(
+ providers=[
+ discovery.Provider(),
+ discovery.Provider(),
+ discovery.Provider(),
+ ],
+ next_page_token='abc',
+ ),
+ eventarc.ListProvidersResponse(
+ providers=[],
+ next_page_token='def',
+ ),
+ eventarc.ListProvidersResponse(
+ providers=[
+ discovery.Provider(),
+ ],
+ next_page_token='ghi',
+ ),
+ eventarc.ListProvidersResponse(
+ providers=[
+ discovery.Provider(),
+ discovery.Provider(),
+ ],
+ ),
+ )
+ # Two responses for two calls
+ response = response + response
+
+ # Wrap the values into proper Response objs
+ response = tuple(eventarc.ListProvidersResponse.to_json(x) for x in response)
+ return_values = tuple(Response() for i in response)
+ for return_val, response_val in zip(return_values, response):
+ return_val._content = response_val.encode('UTF-8')
+ return_val.status_code = 200
+ req.side_effect = return_values
+
+ sample_request = {'parent': 'projects/sample1/locations/sample2'}
+
+ pager = client.list_providers(request=sample_request)
+
+ results = list(pager)
+ assert len(results) == 6
+ assert all(isinstance(i, discovery.Provider)
+ for i in results)
+
+ pages = list(client.list_providers(request=sample_request).pages)
+ for page_, token in zip(pages, ['abc','def','ghi', '']):
+ assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.parametrize("request_type", [
+ eventarc.GetChannelConnectionRequest,
+ dict,
+])
+def test_get_channel_connection_rest(request_type):
+ client = EventarcClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
+ )
+
+ # send a request that will satisfy transcoding
+ request_init = {'name': 'projects/sample1/locations/sample2/channelConnections/sample3'}
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(type(client.transport._session), 'request') as req:
+ # Designate an appropriate value for the returned response.
+ return_value = channel_connection.ChannelConnection(
+ name='name_value',
+ uid='uid_value',
+ channel='channel_value',
+ activation_token='activation_token_value',
+ )
+
+ # Wrap the value into a proper Response obj
+ response_value = Response()
+ response_value.status_code = 200
+ pb_return_value = channel_connection.ChannelConnection.pb(return_value)
+ json_return_value = json_format.MessageToJson(pb_return_value)
+
+ response_value._content = json_return_value.encode('UTF-8')
+ req.return_value = response_value
+ response = client.get_channel_connection(request)
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, channel_connection.ChannelConnection) + assert response.name == 'name_value' + assert response.uid == 'uid_value' + assert response.channel == 'channel_value' + assert response.activation_token == 'activation_token_value' + + +def test_get_channel_connection_rest_required_fields(request_type=eventarc.GetChannelConnectionRequest): + transport_class = transports.EventarcRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_channel_connection._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_channel_connection._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = channel_connection.ChannelConnection() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = channel_connection.ChannelConnection.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get_channel_connection(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_channel_connection_rest_unset_required_fields(): + transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_channel_connection._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_channel_connection_rest_interceptors(null_interceptor): + transport = transports.EventarcRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), + ) + client = EventarcClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.EventarcRestInterceptor, "post_get_channel_connection") as post, \ + mock.patch.object(transports.EventarcRestInterceptor, "pre_get_channel_connection") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = eventarc.GetChannelConnectionRequest.pb(eventarc.GetChannelConnectionRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = channel_connection.ChannelConnection.to_json(channel_connection.ChannelConnection()) + + request = eventarc.GetChannelConnectionRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = channel_connection.ChannelConnection() + + client.get_channel_connection(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_channel_connection_rest_bad_request(transport: str = 'rest', request_type=eventarc.GetChannelConnectionRequest): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/channelConnections/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_channel_connection(request) + + +def test_get_channel_connection_rest_flattened(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = channel_connection.ChannelConnection() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/channelConnections/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = channel_connection.ChannelConnection.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get_channel_connection(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/locations/*/channelConnections/*}" % client.transport._host, args[1]) + + +def test_get_channel_connection_rest_flattened_error(transport: str = 'rest'): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_channel_connection( + eventarc.GetChannelConnectionRequest(), + name='name_value', + ) + + +def test_get_channel_connection_rest_error(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + eventarc.ListChannelConnectionsRequest, + dict, +]) +def test_list_channel_connections_rest(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = eventarc.ListChannelConnectionsResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = eventarc.ListChannelConnectionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list_channel_connections(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListChannelConnectionsPager) + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable == ['unreachable_value'] + + +def test_list_channel_connections_rest_required_fields(request_type=eventarc.ListChannelConnectionsRequest): + transport_class = transports.EventarcRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_channel_connections._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_channel_connections._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("page_size", "page_token", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = eventarc.ListChannelConnectionsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = eventarc.ListChannelConnectionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list_channel_connections(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_channel_connections_rest_unset_required_fields(): + transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list_channel_connections._get_unset_required_fields({}) + assert set(unset_fields) == (set(("pageSize", "pageToken", )) & set(("parent", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_channel_connections_rest_interceptors(null_interceptor): + transport = transports.EventarcRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), + ) + client = EventarcClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.EventarcRestInterceptor, "post_list_channel_connections") as post, \ + mock.patch.object(transports.EventarcRestInterceptor, "pre_list_channel_connections") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = eventarc.ListChannelConnectionsRequest.pb(eventarc.ListChannelConnectionsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = eventarc.ListChannelConnectionsResponse.to_json(eventarc.ListChannelConnectionsResponse()) + + request = eventarc.ListChannelConnectionsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = eventarc.ListChannelConnectionsResponse() + + client.list_channel_connections(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_channel_connections_rest_bad_request(transport: str = 'rest', request_type=eventarc.ListChannelConnectionsRequest): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_channel_connections(request) + + +def test_list_channel_connections_rest_flattened(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = eventarc.ListChannelConnectionsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/locations/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = eventarc.ListChannelConnectionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list_channel_connections(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/channelConnections" % client.transport._host, args[1]) + + +def test_list_channel_connections_rest_flattened_error(transport: str = 'rest'): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_channel_connections( + eventarc.ListChannelConnectionsRequest(), + parent='parent_value', + ) + + +def test_list_channel_connections_rest_pager(transport: str = 'rest'): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
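+        # The fabricated pages below hold 3 + 0 + 1 + 2 items. The tuple is
+        # doubled because the test iterates twice (items, then pages), and each
+        # full iteration consumes one set of four responses via side_effect.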
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + eventarc.ListChannelConnectionsResponse( + channel_connections=[ + channel_connection.ChannelConnection(), + channel_connection.ChannelConnection(), + channel_connection.ChannelConnection(), + ], + next_page_token='abc', + ), + eventarc.ListChannelConnectionsResponse( + channel_connections=[], + next_page_token='def', + ), + eventarc.ListChannelConnectionsResponse( + channel_connections=[ + channel_connection.ChannelConnection(), + ], + next_page_token='ghi', + ), + eventarc.ListChannelConnectionsResponse( + channel_connections=[ + channel_connection.ChannelConnection(), + channel_connection.ChannelConnection(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(eventarc.ListChannelConnectionsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'parent': 'projects/sample1/locations/sample2'} + + pager = client.list_channel_connections(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, channel_connection.ChannelConnection) + for i in results) + + pages = list(client.list_channel_connections(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + eventarc.CreateChannelConnectionRequest, + dict, +]) +def test_create_channel_connection_rest(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init["channel_connection"] = {'name': 'name_value', 'uid': 'uid_value', 'channel': 'channel_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'activation_token': 'activation_token_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.create_channel_connection(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_create_channel_connection_rest_required_fields(request_type=eventarc.CreateChannelConnectionRequest): + transport_class = transports.EventarcRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["channel_connection_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + assert "channelConnectionId" not in jsonified_request + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_channel_connection._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "channelConnectionId" in jsonified_request + assert jsonified_request["channelConnectionId"] == request_init["channel_connection_id"] + + jsonified_request["parent"] = 'parent_value' + jsonified_request["channelConnectionId"] = 'channel_connection_id_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_channel_connection._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("channel_connection_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + assert "channelConnectionId" in jsonified_request + assert jsonified_request["channelConnectionId"] == 'channel_connection_id_value' + + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
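+            # channel_connection_id is a required field, so even its default
+            # (empty) value is expected to survive into the query string --
+            # hence the ("channelConnectionId", "") entry asserted below.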
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.create_channel_connection(request) + + expected_params = [ + ( + "channelConnectionId", + "", + ), + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_create_channel_connection_rest_unset_required_fields(): + transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.create_channel_connection._get_unset_required_fields({}) + assert set(unset_fields) == (set(("channelConnectionId", )) & set(("parent", "channelConnection", "channelConnectionId", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_channel_connection_rest_interceptors(null_interceptor): + transport = transports.EventarcRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), + ) + client = EventarcClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.EventarcRestInterceptor, "post_create_channel_connection") as post, \ + mock.patch.object(transports.EventarcRestInterceptor, "pre_create_channel_connection") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = eventarc.CreateChannelConnectionRequest.pb(eventarc.CreateChannelConnectionRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) + + request = eventarc.CreateChannelConnectionRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_channel_connection(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_channel_connection_rest_bad_request(transport: str = 'rest', request_type=eventarc.CreateChannelConnectionRequest): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init["channel_connection"] = {'name': 'name_value', 'uid': 'uid_value', 'channel': 'channel_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'activation_token': 'activation_token_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
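+    # The REST transport maps a 400 response onto
+    # google.api_core.exceptions.BadRequest, which pytest.raises expects.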
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_channel_connection(request) + + +def test_create_channel_connection_rest_flattened(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/locations/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + channel_connection=gce_channel_connection.ChannelConnection(name='name_value'), + channel_connection_id='channel_connection_id_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.create_channel_connection(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/channelConnections" % client.transport._host, args[1]) + + +def test_create_channel_connection_rest_flattened_error(transport: str = 'rest'): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_channel_connection( + eventarc.CreateChannelConnectionRequest(), + parent='parent_value', + channel_connection=gce_channel_connection.ChannelConnection(name='name_value'), + channel_connection_id='channel_connection_id_value', + ) + + +def test_create_channel_connection_rest_error(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + eventarc.DeleteChannelConnectionRequest, + dict, +]) +def test_delete_channel_connection_rest(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/channelConnections/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete_channel_connection(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_delete_channel_connection_rest_required_fields(request_type=eventarc.DeleteChannelConnectionRequest): + transport_class = transports.EventarcRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_channel_connection._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_channel_connection._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_channel_connection(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_channel_connection_rest_unset_required_fields(): + transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete_channel_connection._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_channel_connection_rest_interceptors(null_interceptor): + transport = transports.EventarcRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), + ) + client = EventarcClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.EventarcRestInterceptor, "post_delete_channel_connection") as post, \ + mock.patch.object(transports.EventarcRestInterceptor, "pre_delete_channel_connection") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = eventarc.DeleteChannelConnectionRequest.pb(eventarc.DeleteChannelConnectionRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) + + request = eventarc.DeleteChannelConnectionRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.delete_channel_connection(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_channel_connection_rest_bad_request(transport: str = 'rest', request_type=eventarc.DeleteChannelConnectionRequest): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/channelConnections/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_channel_connection(request) + + +def test_delete_channel_connection_rest_flattened(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/channelConnections/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_channel_connection(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/locations/*/channelConnections/*}" % client.transport._host, args[1]) + + +def test_delete_channel_connection_rest_flattened_error(transport: str = 'rest'): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_channel_connection( + eventarc.DeleteChannelConnectionRequest(), + name='name_value', + ) + + +def test_delete_channel_connection_rest_error(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + eventarc.GetGoogleChannelConfigRequest, + dict, +]) +def test_get_google_channel_config_rest(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/googleChannelConfig'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = google_channel_config.GoogleChannelConfig( + name='name_value', + crypto_key_name='crypto_key_name_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = google_channel_config.GoogleChannelConfig.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_google_channel_config(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, google_channel_config.GoogleChannelConfig) + assert response.name == 'name_value' + assert response.crypto_key_name == 'crypto_key_name_value' + + +def test_get_google_channel_config_rest_required_fields(request_type=eventarc.GetGoogleChannelConfigRequest): + transport_class = transports.EventarcRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_google_channel_config._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_google_channel_config._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = google_channel_config.GoogleChannelConfig() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = google_channel_config.GoogleChannelConfig.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get_google_channel_config(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_google_channel_config_rest_unset_required_fields(): + transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_google_channel_config._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_google_channel_config_rest_interceptors(null_interceptor): + transport = transports.EventarcRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), + ) + client = EventarcClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.EventarcRestInterceptor, "post_get_google_channel_config") as post, \ + mock.patch.object(transports.EventarcRestInterceptor, "pre_get_google_channel_config") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = eventarc.GetGoogleChannelConfigRequest.pb(eventarc.GetGoogleChannelConfigRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = google_channel_config.GoogleChannelConfig.to_json(google_channel_config.GoogleChannelConfig()) + + request = eventarc.GetGoogleChannelConfigRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = google_channel_config.GoogleChannelConfig() + + client.get_google_channel_config(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_google_channel_config_rest_bad_request(transport: str = 'rest', request_type=eventarc.GetGoogleChannelConfigRequest): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/googleChannelConfig'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_google_channel_config(request) + + +def test_get_google_channel_config_rest_flattened(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = google_channel_config.GoogleChannelConfig() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/googleChannelConfig'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = google_channel_config.GoogleChannelConfig.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get_google_channel_config(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/locations/*/googleChannelConfig}" % client.transport._host, args[1]) + + +def test_get_google_channel_config_rest_flattened_error(transport: str = 'rest'): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_google_channel_config( + eventarc.GetGoogleChannelConfigRequest(), + name='name_value', + ) + + +def test_get_google_channel_config_rest_error(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + eventarc.UpdateGoogleChannelConfigRequest, + dict, +]) +def test_update_google_channel_config_rest(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'google_channel_config': {'name': 'projects/sample1/locations/sample2/googleChannelConfig'}} + request_init["google_channel_config"] = {'name': 'projects/sample1/locations/sample2/googleChannelConfig', 'update_time': {'seconds': 751, 'nanos': 543}, 'crypto_key_name': 'crypto_key_name_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = gce_google_channel_config.GoogleChannelConfig( + name='name_value', + crypto_key_name='crypto_key_name_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = gce_google_channel_config.GoogleChannelConfig.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.update_google_channel_config(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, gce_google_channel_config.GoogleChannelConfig) + assert response.name == 'name_value' + assert response.crypto_key_name == 'crypto_key_name_value' + + +def test_update_google_channel_config_rest_required_fields(request_type=eventarc.UpdateGoogleChannelConfigRequest): + transport_class = transports.EventarcRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_google_channel_config._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_google_channel_config._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = gce_google_channel_config.GoogleChannelConfig() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
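+            # update_google_channel_config transcodes to an HTTP PATCH whose
+            # body is the config message itself; update_mask is optional and,
+            # when set, is carried as an updateMask query parameter instead.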
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = gce_google_channel_config.GoogleChannelConfig.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.update_google_channel_config(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_update_google_channel_config_rest_unset_required_fields(): + transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.update_google_channel_config._get_unset_required_fields({}) + assert set(unset_fields) == (set(("updateMask", )) & set(("googleChannelConfig", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_google_channel_config_rest_interceptors(null_interceptor): + transport = transports.EventarcRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), + ) + client = EventarcClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.EventarcRestInterceptor, "post_update_google_channel_config") as post, \ + mock.patch.object(transports.EventarcRestInterceptor, "pre_update_google_channel_config") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = eventarc.UpdateGoogleChannelConfigRequest.pb(eventarc.UpdateGoogleChannelConfigRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = gce_google_channel_config.GoogleChannelConfig.to_json(gce_google_channel_config.GoogleChannelConfig()) + + request = eventarc.UpdateGoogleChannelConfigRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = gce_google_channel_config.GoogleChannelConfig() + + client.update_google_channel_config(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_google_channel_config_rest_bad_request(transport: str = 'rest', request_type=eventarc.UpdateGoogleChannelConfigRequest): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'google_channel_config': {'name': 'projects/sample1/locations/sample2/googleChannelConfig'}} + request_init["google_channel_config"] = {'name': 'projects/sample1/locations/sample2/googleChannelConfig', 'update_time': {'seconds': 751, 'nanos': 543}, 'crypto_key_name': 'crypto_key_name_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_google_channel_config(request) + + +def test_update_google_channel_config_rest_flattened(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = gce_google_channel_config.GoogleChannelConfig() + + # get arguments that satisfy an http rule for this method + sample_request = {'google_channel_config': {'name': 'projects/sample1/locations/sample2/googleChannelConfig'}} + + # get truthy value for each flattened field + mock_args = dict( + google_channel_config=gce_google_channel_config.GoogleChannelConfig(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = gce_google_channel_config.GoogleChannelConfig.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.update_google_channel_config(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{google_channel_config.name=projects/*/locations/*/googleChannelConfig}" % client.transport._host, args[1]) + + +def test_update_google_channel_config_rest_flattened_error(transport: str = 'rest'): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_google_channel_config( + eventarc.UpdateGoogleChannelConfigRequest(), + google_channel_config=gce_google_channel_config.GoogleChannelConfig(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +def test_update_google_channel_config_rest_error(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.EventarcGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.EventarcGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = EventarcClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. 
+ transport = transports.EventarcGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = EventarcClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = EventarcClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.EventarcGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = EventarcClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.EventarcGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = EventarcClient(transport=transport) + assert client.transport is transport + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.EventarcGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.EventarcGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + +@pytest.mark.parametrize("transport_class", [ + transports.EventarcGrpcTransport, + transports.EventarcGrpcAsyncIOTransport, + transports.EventarcRestTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +@pytest.mark.parametrize("transport_name", [ + "grpc", + "rest", +]) +def test_transport_kind(transport_name): + transport = EventarcClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.EventarcGrpcTransport, + ) + +def test_eventarc_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.EventarcTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_eventarc_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.eventarc_v1.services.eventarc.transports.EventarcTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.EventarcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
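+    # (The trailing underscore on 'create_channel_' below is deliberate:
+    # it presumably avoids a name collision with the transport's own gRPC
+    # create_channel helper.)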
+ methods = ( + 'get_trigger', + 'list_triggers', + 'create_trigger', + 'update_trigger', + 'delete_trigger', + 'get_channel', + 'list_channels', + 'create_channel_', + 'update_channel', + 'delete_channel', + 'get_provider', + 'list_providers', + 'get_channel_connection', + 'list_channel_connections', + 'create_channel_connection', + 'delete_channel_connection', + 'get_google_channel_config', + 'update_google_channel_config', + 'set_iam_policy', + 'get_iam_policy', + 'test_iam_permissions', + 'get_location', + 'list_locations', + 'get_operation', + 'cancel_operation', + 'delete_operation', + 'list_operations', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_eventarc_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.eventarc_v1.services.eventarc.transports.EventarcTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.EventarcTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +def test_eventarc_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.eventarc_v1.services.eventarc.transports.EventarcTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.EventarcTransport() + adc.assert_called_once() + + +def test_eventarc_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + EventarcClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.EventarcGrpcTransport, + transports.EventarcGrpcAsyncIOTransport, + ], +) +def test_eventarc_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=( 'https://www.googleapis.com/auth/cloud-platform',), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.EventarcGrpcTransport, + transports.EventarcGrpcAsyncIOTransport, + transports.EventarcRestTransport, + ], +) +def test_eventarc_transport_auth_gdch_credentials(transport_class): + host = 'https://language.com' + api_audience_tests = [None, 'https://language2.com'] + api_audience_expect = [host, 'https://language2.com'] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with( + e + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.EventarcGrpcTransport, grpc_helpers), + (transports.EventarcGrpcAsyncIOTransport, grpc_helpers_async) + ], +) +def test_eventarc_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class( + quota_project_id="octopus", + scopes=["1", "2"] + ) + + create_channel.assert_called_with( + "eventarc.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + scopes=["1", "2"], + default_host="eventarc.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("transport_class", [transports.EventarcGrpcTransport, transports.EventarcGrpcAsyncIOTransport]) +def test_eventarc_grpc_transport_client_cert_source_for_mtls( + transport_class +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
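+    # (The callback is expected to return a (cert_bytes, key_bytes) pair; the
+    # transport should hand exactly that pair to grpc.ssl_channel_credentials,
+    # which the assertions below verify.)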
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback ) - expected_cert, expected_key = client_cert_source_callback() - mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, - private_key=expected_key + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, + private_key=expected_key + ) + +def test_eventarc_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: + transports.EventarcRestTransport ( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +def test_eventarc_rest_lro_client(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.AbstractOperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +@pytest.mark.parametrize("transport_name", [ + "grpc", + "grpc_asyncio", + "rest", +]) +def test_eventarc_host_no_port(transport_name): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='eventarc.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'eventarc.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://eventarc.googleapis.com' + ) + +@pytest.mark.parametrize("transport_name", [ + "grpc", + "grpc_asyncio", + "rest", +]) +def test_eventarc_host_with_port(transport_name): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='eventarc.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'eventarc.googleapis.com:8000' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://eventarc.googleapis.com:8000' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_eventarc_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = EventarcClient( + credentials=creds1, + transport=transport_name, + ) + client2 = EventarcClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.get_trigger._session + session2 = client2.transport.get_trigger._session + assert session1 != session2 + session1 = client1.transport.list_triggers._session + session2 = client2.transport.list_triggers._session + assert session1 != session2 + session1 = client1.transport.create_trigger._session + session2 = client2.transport.create_trigger._session + assert session1 != session2 + session1 = client1.transport.update_trigger._session + session2 = client2.transport.update_trigger._session + assert session1 != session2 + session1 = client1.transport.delete_trigger._session + session2 = 
client2.transport.delete_trigger._session + assert session1 != session2 + session1 = client1.transport.get_channel._session + session2 = client2.transport.get_channel._session + assert session1 != session2 + session1 = client1.transport.list_channels._session + session2 = client2.transport.list_channels._session + assert session1 != session2 + session1 = client1.transport.create_channel_._session + session2 = client2.transport.create_channel_._session + assert session1 != session2 + session1 = client1.transport.update_channel._session + session2 = client2.transport.update_channel._session + assert session1 != session2 + session1 = client1.transport.delete_channel._session + session2 = client2.transport.delete_channel._session + assert session1 != session2 + session1 = client1.transport.get_provider._session + session2 = client2.transport.get_provider._session + assert session1 != session2 + session1 = client1.transport.list_providers._session + session2 = client2.transport.list_providers._session + assert session1 != session2 + session1 = client1.transport.get_channel_connection._session + session2 = client2.transport.get_channel_connection._session + assert session1 != session2 + session1 = client1.transport.list_channel_connections._session + session2 = client2.transport.list_channel_connections._session + assert session1 != session2 + session1 = client1.transport.create_channel_connection._session + session2 = client2.transport.create_channel_connection._session + assert session1 != session2 + session1 = client1.transport.delete_channel_connection._session + session2 = client2.transport.delete_channel_connection._session + assert session1 != session2 + session1 = client1.transport.get_google_channel_config._session + session2 = client2.transport.get_google_channel_config._session + assert session1 != session2 + session1 = client1.transport.update_google_channel_config._session + session2 = client2.transport.update_google_channel_config._session + assert session1 != session2 +def test_eventarc_grpc_transport_channel(): + channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.EventarcGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_eventarc_grpc_asyncio_transport_channel(): + channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.EventarcGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.parametrize("transport_class", [transports.EventarcGrpcTransport, transports.EventarcGrpcAsyncIOTransport]) +def test_eventarc_transport_channel_mtls_with_client_cert_source( + transport_class +): + with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize("transport_class", [transports.EventarcGrpcTransport, transports.EventarcGrpcAsyncIOTransport]) +def test_eventarc_transport_channel_mtls_with_adc( + transport_class +): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_eventarc_grpc_lro_client(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.OperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_eventarc_grpc_lro_async_client(): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + transport = client.transport + + # Ensure that we have a api-core operations client. 
+ assert isinstance( + transport.operations_client, + operations_v1.OperationsAsyncClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_channel_path(): + project = "squid" + location = "clam" + channel = "whelk" + expected = "projects/{project}/locations/{location}/channels/{channel}".format(project=project, location=location, channel=channel, ) + actual = EventarcClient.channel_path(project, location, channel) + assert expected == actual + + +def test_parse_channel_path(): + expected = { + "project": "octopus", + "location": "oyster", + "channel": "nudibranch", + } + path = EventarcClient.channel_path(**expected) + + # Check that the path construction is reversible. + actual = EventarcClient.parse_channel_path(path) + assert expected == actual + +def test_channel_connection_path(): + project = "cuttlefish" + location = "mussel" + channel_connection = "winkle" + expected = "projects/{project}/locations/{location}/channelConnections/{channel_connection}".format(project=project, location=location, channel_connection=channel_connection, ) + actual = EventarcClient.channel_connection_path(project, location, channel_connection) + assert expected == actual + + +def test_parse_channel_connection_path(): + expected = { + "project": "nautilus", + "location": "scallop", + "channel_connection": "abalone", + } + path = EventarcClient.channel_connection_path(**expected) + + # Check that the path construction is reversible. + actual = EventarcClient.parse_channel_connection_path(path) + assert expected == actual + +def test_cloud_function_path(): + project = "squid" + location = "clam" + function = "whelk" + expected = "projects/{project}/locations/{location}/functions/{function}".format(project=project, location=location, function=function, ) + actual = EventarcClient.cloud_function_path(project, location, function) + assert expected == actual + + +def test_parse_cloud_function_path(): + expected = { + "project": "octopus", + "location": "oyster", + "function": "nudibranch", + } + path = EventarcClient.cloud_function_path(**expected) + + # Check that the path construction is reversible. + actual = EventarcClient.parse_cloud_function_path(path) + assert expected == actual + +def test_crypto_key_path(): + project = "cuttlefish" + location = "mussel" + key_ring = "winkle" + crypto_key = "nautilus" + expected = "projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}".format(project=project, location=location, key_ring=key_ring, crypto_key=crypto_key, ) + actual = EventarcClient.crypto_key_path(project, location, key_ring, crypto_key) + assert expected == actual + + +def test_parse_crypto_key_path(): + expected = { + "project": "scallop", + "location": "abalone", + "key_ring": "squid", + "crypto_key": "clam", + } + path = EventarcClient.crypto_key_path(**expected) + + # Check that the path construction is reversible. 
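+    # (Each parse_* helper inverts the template with a regex and returns the
+    # captured segments as a dict; a non-matching path yields an empty dict.)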
+ actual = EventarcClient.parse_crypto_key_path(path) + assert expected == actual + +def test_google_channel_config_path(): + project = "whelk" + location = "octopus" + expected = "projects/{project}/locations/{location}/googleChannelConfig".format(project=project, location=location, ) + actual = EventarcClient.google_channel_config_path(project, location) + assert expected == actual + + +def test_parse_google_channel_config_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + } + path = EventarcClient.google_channel_config_path(**expected) + + # Check that the path construction is reversible. + actual = EventarcClient.parse_google_channel_config_path(path) + assert expected == actual + +def test_provider_path(): + project = "cuttlefish" + location = "mussel" + provider = "winkle" + expected = "projects/{project}/locations/{location}/providers/{provider}".format(project=project, location=location, provider=provider, ) + actual = EventarcClient.provider_path(project, location, provider) + assert expected == actual + + +def test_parse_provider_path(): + expected = { + "project": "nautilus", + "location": "scallop", + "provider": "abalone", + } + path = EventarcClient.provider_path(**expected) + + # Check that the path construction is reversible. + actual = EventarcClient.parse_provider_path(path) + assert expected == actual + +def test_service_path(): + expected = "*".format() + actual = EventarcClient.service_path() + assert expected == actual + + +def test_parse_service_path(): + expected = { + } + path = EventarcClient.service_path(**expected) + + # Check that the path construction is reversible. + actual = EventarcClient.parse_service_path(path) + assert expected == actual + +def test_service_account_path(): + project = "squid" + service_account = "clam" + expected = "projects/{project}/serviceAccounts/{service_account}".format(project=project, service_account=service_account, ) + actual = EventarcClient.service_account_path(project, service_account) + assert expected == actual + + +def test_parse_service_account_path(): + expected = { + "project": "whelk", + "service_account": "octopus", + } + path = EventarcClient.service_account_path(**expected) + + # Check that the path construction is reversible. + actual = EventarcClient.parse_service_account_path(path) + assert expected == actual + +def test_trigger_path(): + project = "oyster" + location = "nudibranch" + trigger = "cuttlefish" + expected = "projects/{project}/locations/{location}/triggers/{trigger}".format(project=project, location=location, trigger=trigger, ) + actual = EventarcClient.trigger_path(project, location, trigger) + assert expected == actual + + +def test_parse_trigger_path(): + expected = { + "project": "mussel", + "location": "winkle", + "trigger": "nautilus", + } + path = EventarcClient.trigger_path(**expected) + + # Check that the path construction is reversible. + actual = EventarcClient.parse_trigger_path(path) + assert expected == actual + +def test_workflow_path(): + project = "scallop" + location = "abalone" + workflow = "squid" + expected = "projects/{project}/locations/{location}/workflows/{workflow}".format(project=project, location=location, workflow=workflow, ) + actual = EventarcClient.workflow_path(project, location, workflow) + assert expected == actual + + +def test_parse_workflow_path(): + expected = { + "project": "clam", + "location": "whelk", + "workflow": "octopus", + } + path = EventarcClient.workflow_path(**expected) + + # Check that the path construction is reversible. 
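+    # (Illustrative round trip: workflow_path("p", "l", "wf") gives
+    # 'projects/p/locations/l/workflows/wf', and parsing that string back
+    # returns {'project': 'p', 'location': 'l', 'workflow': 'wf'}.)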
+ actual = EventarcClient.parse_workflow_path(path) + assert expected == actual + +def test_common_billing_account_path(): + billing_account = "oyster" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = EventarcClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "nudibranch", + } + path = EventarcClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = EventarcClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "cuttlefish" + expected = "folders/{folder}".format(folder=folder, ) + actual = EventarcClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "mussel", + } + path = EventarcClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = EventarcClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "winkle" + expected = "organizations/{organization}".format(organization=organization, ) + actual = EventarcClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nautilus", + } + path = EventarcClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = EventarcClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "scallop" + expected = "projects/{project}".format(project=project, ) + actual = EventarcClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "abalone", + } + path = EventarcClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = EventarcClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "squid" + location = "clam" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = EventarcClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "whelk", + "location": "octopus", + } + path = EventarcClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
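+    # (The common_* helpers are inherited by every generated client, covering
+    # the shared resource hierarchy: billing account, folder, organization,
+    # project, and project/location.)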
+ actual = EventarcClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.EventarcTransport, '_prep_wrapped_messages') as prep: + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.EventarcTransport, '_prep_wrapped_messages') as prep: + transport_class = EventarcClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_get_location_rest_bad_request(transport: str = 'rest', request_type=locations_pb2.GetLocationRequest): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2'}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_location(request) + +@pytest.mark.parametrize("request_type", [ + locations_pb2.GetLocationRequest, + dict, +]) +def test_get_location_rest(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {'name': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.Location() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get_location(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + +def test_list_locations_rest_bad_request(transport: str = 'rest', request_type=locations_pb2.ListLocationsRequest): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1'}, request) + + # Mock the http request call within the method and fake a BadRequest error. 
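+    # (Patching requests.Session.request lets the test return a canned 400
+    # Response; the REST transport translates non-2xx statuses into
+    # google.api_core exceptions, here core_exceptions.BadRequest.)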
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_locations(request) + +@pytest.mark.parametrize("request_type", [ + locations_pb2.ListLocationsRequest, + dict, +]) +def test_list_locations_rest(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {'name': 'projects/sample1'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.ListLocationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list_locations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + +def test_get_iam_policy_rest_bad_request(transport: str = 'rest', request_type=iam_policy_pb2.GetIamPolicyRequest): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict({'resource': 'projects/sample1/locations/sample2/triggers/sample3'}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_iam_policy(request) + +@pytest.mark.parametrize("request_type", [ + iam_policy_pb2.GetIamPolicyRequest, + dict, +]) +def test_get_iam_policy_rest(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {'resource': 'projects/sample1/locations/sample2/triggers/sample3'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = policy_pb2.Policy() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get_iam_policy(request) + + # Establish that the response is the type that we expect. 
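+    # (get_iam_policy is an IAM mixin RPC: the faked JSON body above is decoded
+    # back into a policy_pb2.Policy message by the REST transport.)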
+ assert isinstance(response, policy_pb2.Policy) + +def test_set_iam_policy_rest_bad_request(transport: str = 'rest', request_type=iam_policy_pb2.SetIamPolicyRequest): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict({'resource': 'projects/sample1/locations/sample2/triggers/sample3'}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_iam_policy(request) + +@pytest.mark.parametrize("request_type", [ + iam_policy_pb2.SetIamPolicyRequest, + dict, +]) +def test_set_iam_policy_rest(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {'resource': 'projects/sample1/locations/sample2/triggers/sample3'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = policy_pb2.Policy() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.set_iam_policy(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + +def test_test_iam_permissions_rest_bad_request(transport: str = 'rest', request_type=iam_policy_pb2.TestIamPermissionsRequest): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict({'resource': 'projects/sample1/locations/sample2/triggers/sample3'}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.test_iam_permissions(request) + +@pytest.mark.parametrize("request_type", [ + iam_policy_pb2.TestIamPermissionsRequest, + dict, +]) +def test_test_iam_permissions_rest(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {'resource': 'projects/sample1/locations/sample2/triggers/sample3'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
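+        # (The message is serialized with json_format.MessageToJson and placed
+        # in Response._content below, emulating the JSON the server would
+        # return on the wire.)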
+ return_value = iam_policy_pb2.TestIamPermissionsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.test_iam_permissions(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + +def test_cancel_operation_rest_bad_request(transport: str = 'rest', request_type=operations_pb2.CancelOperationRequest): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_operation(request) + +@pytest.mark.parametrize("request_type", [ + operations_pb2.CancelOperationRequest, + dict, +]) +def test_cancel_operation_rest(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = '{}' + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + +def test_delete_operation_rest_bad_request(transport: str = 'rest', request_type=operations_pb2.DeleteOperationRequest): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_operation(request) + +@pytest.mark.parametrize("request_type", [ + operations_pb2.DeleteOperationRequest, + dict, +]) +def test_delete_operation_rest(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. 
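+    # (DeleteOperation returns google.protobuf.Empty, so the faked body is
+    # just '{}' and the client surfaces the result as None.)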
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = '{}' + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + +def test_get_operation_rest_bad_request(transport: str = 'rest', request_type=operations_pb2.GetOperationRequest): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) + +@pytest.mark.parametrize("request_type", [ + operations_pb2.GetOperationRequest, + dict, +]) +def test_get_operation_rest(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + +def test_list_operations_rest_bad_request(transport: str = 'rest', request_type=operations_pb2.ListOperationsRequest): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2'}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_operations(request) + +@pytest.mark.parametrize("request_type", [ + operations_pb2.ListOperationsRequest, + dict, +]) +def test_list_operations_rest(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {'name': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_delete_operation(transport: str = "grpc"): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None +@pytest.mark.asyncio +async def test_delete_operation_async(transport: str = "grpc"): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + +def test_delete_operation_field_headers(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = None + + client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
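+    # (Implicit routing: URI fields are mirrored into the x-goog-request-params
+    # metadata entry so the backend can route the call; kw["metadata"] is the
+    # list of (key, value) tuples handed to the stub.)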
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_delete_operation_field_headers_async(): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_delete_operation_from_dict(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_delete_operation_from_dict_async(): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_cancel_operation(transport: str = "grpc"): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc"): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. 
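+    # (Patching type(...).__call__ replaces the unary-unary stub itself, so no
+    # channel traffic occurs; only the recorded mock_calls are inspected.)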
+ with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + +def test_cancel_operation_field_headers(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_cancel_operation_from_dict(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_operation(transport: str = "grpc"): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc"): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + +def test_get_operation_field_headers(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_get_operation_from_dict(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc"): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. 
+    assert isinstance(response, operations_pb2.ListOperationsResponse)
+
+def test_list_operations_field_headers():
+    client = EventarcClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = operations_pb2.ListOperationsRequest()
+    request.name = "locations"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_operations), "__call__") as call:
+        call.return_value = operations_pb2.ListOperationsResponse()
+
+        client.list_operations(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+        # Establish that the field header was sent.
+        _, _, kw = call.mock_calls[0]
+        assert ("x-goog-request-params", "name=locations",) in kw["metadata"]
+@pytest.mark.asyncio
+async def test_list_operations_field_headers_async():
+    client = EventarcAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = operations_pb2.ListOperationsRequest()
+    request.name = "locations"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_operations), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.ListOperationsResponse()
+        )
+        await client.list_operations(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+        # Establish that the field header was sent.
+        _, _, kw = call.mock_calls[0]
+        assert ("x-goog-request-params", "name=locations",) in kw["metadata"]
+
+def test_list_operations_from_dict():
+    client = EventarcClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_operations), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.ListOperationsResponse()
+
+        response = client.list_operations(
+            request={
+                "name": "locations",
+            }
+        )
+        call.assert_called()
+@pytest.mark.asyncio
+async def test_list_operations_from_dict_async():
+    client = EventarcAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_operations), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.ListOperationsResponse()
+        )
+        response = await client.list_operations(
+            request={
+                "name": "locations",
+            }
+        )
+        call.assert_called()
+
+
+def test_list_locations(transport: str = "grpc"):
+    client = EventarcClient(
+        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = locations_pb2.ListLocationsRequest()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_locations), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = locations_pb2.ListLocationsResponse()
+        response = client.list_locations(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, locations_pb2.ListLocationsResponse)
+@pytest.mark.asyncio
+async def test_list_locations_async(transport: str = "grpc_asyncio"):
+    client = EventarcAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = locations_pb2.ListLocationsRequest()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_locations), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            locations_pb2.ListLocationsResponse()
+        )
+        response = await client.list_locations(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, locations_pb2.ListLocationsResponse)
+
+def test_list_locations_field_headers():
+    client = EventarcClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = locations_pb2.ListLocationsRequest()
+    request.name = "locations"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_locations), "__call__") as call:
+        call.return_value = locations_pb2.ListLocationsResponse()
+
+        client.list_locations(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "name=locations",) in kw["metadata"]
+@pytest.mark.asyncio
+async def test_list_locations_field_headers_async():
+    client = EventarcAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = locations_pb2.ListLocationsRequest()
+    request.name = "locations"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
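+    # Note: patching "__call__" on the stub's *type* intercepts the RPC at the
+    # transport layer, so no real channel traffic is ever attempted.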
+ with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + await client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_list_locations_from_dict(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.ListLocationsResponse() + + response = client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_list_locations_from_dict_async(): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + response = await client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_location(transport: str = "grpc"): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.GetLocationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.Location() + response = client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) +@pytest.mark.asyncio +async def test_get_location_async(transport: str = "grpc_asyncio"): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.GetLocationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.Location() + ) + response = await client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. 
+    assert isinstance(response, locations_pb2.Location)
+
+def test_get_location_field_headers():
+    client = EventarcClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = locations_pb2.GetLocationRequest()
+    request.name = "locations/abc"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        call.return_value = locations_pb2.Location()
+
+        client.get_location(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "name=locations/abc",) in kw["metadata"]
+@pytest.mark.asyncio
+async def test_get_location_field_headers_async():
+    client = EventarcAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = locations_pb2.GetLocationRequest()
+    request.name = "locations/abc"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            locations_pb2.Location()
+        )
+        await client.get_location(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "name=locations/abc",) in kw["metadata"]
+
+def test_get_location_from_dict():
+    client = EventarcClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = locations_pb2.Location()
+
+        response = client.get_location(
+            request={
+                "name": "locations/abc",
+            }
+        )
+        call.assert_called()
+@pytest.mark.asyncio
+async def test_get_location_from_dict_async():
+    client = EventarcAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            locations_pb2.Location()
+        )
+        response = await client.get_location(
+            request={
+                "name": "locations/abc",
+            }
+        )
+        call.assert_called()
+
+
+def test_set_iam_policy(transport: str = "grpc"):
+    client = EventarcClient(
+        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = iam_policy_pb2.SetIamPolicyRequest()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
+        # Designate an appropriate return value for the call.
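+        # (774 and b"etag_blob" are arbitrary fixture values; they carry no
+        # IAM meaning beyond being easy to assert on.)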
+        call.return_value = policy_pb2.Policy(version=774, etag=b"etag_blob",)
+        response = client.set_iam_policy(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, policy_pb2.Policy)
+
+    assert response.version == 774
+
+    assert response.etag == b"etag_blob"
+@pytest.mark.asyncio
+async def test_set_iam_policy_async(transport: str = "grpc_asyncio"):
+    client = EventarcAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = iam_policy_pb2.SetIamPolicyRequest()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            policy_pb2.Policy(version=774, etag=b"etag_blob",)
         )
-        mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback)
+        response = await client.set_iam_policy(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request

-def test_eventarc_rest_lro_client():
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, policy_pb2.Policy)
+
+    assert response.version == 774
+
+    assert response.etag == b"etag_blob"
+
+def test_set_iam_policy_field_headers():
     client = EventarcClient(
         credentials=ga_credentials.AnonymousCredentials(),
-        transport='rest',
     )
-    transport = client.transport

-    # Ensure that we have a api-core operations client.
-    assert isinstance(
-        transport.operations_client,
-        operations_v1.AbstractOperationsClient,
-    )
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = iam_policy_pb2.SetIamPolicyRequest()
+    request.resource = "resource/value"

-    # Ensure that subsequent calls to the property send the exact same object.
-    assert transport.operations_client is transport.operations_client
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
+        call.return_value = policy_pb2.Policy()
+
+        client.set_iam_policy(request)

-@pytest.mark.parametrize("transport_name", [
-    "grpc",
-    "grpc_asyncio",
-    "rest",
-])
-def test_eventarc_host_no_port(transport_name):
-    client = EventarcClient(
+    # Establish that the underlying gRPC stub method was called.
+    assert len(call.mock_calls) == 1
+    _, args, _ = call.mock_calls[0]
+    assert args[0] == request
+
+    # Establish that the field header was sent.
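+    # mock_calls entries are (name, args, kwargs) triples; the routing header
+    # travels in the "metadata" kwarg.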
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] +@pytest.mark.asyncio +async def test_set_iam_policy_field_headers_async(): + client = EventarcAsyncClient( credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='eventarc.googleapis.com'), - transport=transport_name, - ) - assert client.transport._host == ( - 'eventarc.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://eventarc.googleapis.com' ) -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", -]) -def test_eventarc_host_with_port(transport_name): + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.SetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + + await client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + +def test_set_iam_policy_from_dict(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='eventarc.googleapis.com:8000'), - transport=transport_name, - ) - assert client.transport._host == ( - 'eventarc.googleapis.com:8000' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://eventarc.googleapis.com:8000' ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. 
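+        # The from_dict variants exercise the dict-to-proto coercion path of
+        # the client method, rather than passing a prebuilt request object.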
+ call.return_value = policy_pb2.Policy() -@pytest.mark.parametrize("transport_name", [ - "rest", -]) -def test_eventarc_client_transport_session_collision(transport_name): - creds1 = ga_credentials.AnonymousCredentials() - creds2 = ga_credentials.AnonymousCredentials() - client1 = EventarcClient( - credentials=creds1, - transport=transport_name, - ) - client2 = EventarcClient( - credentials=creds2, - transport=transport_name, + response = client.set_iam_policy( + request={ + "resource": "resource_value", + "policy": policy_pb2.Policy(version=774), + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_set_iam_policy_from_dict_async(): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), ) - session1 = client1.transport.get_trigger._session - session2 = client2.transport.get_trigger._session - assert session1 != session2 - session1 = client1.transport.list_triggers._session - session2 = client2.transport.list_triggers._session - assert session1 != session2 - session1 = client1.transport.create_trigger._session - session2 = client2.transport.create_trigger._session - assert session1 != session2 - session1 = client1.transport.update_trigger._session - session2 = client2.transport.update_trigger._session - assert session1 != session2 - session1 = client1.transport.delete_trigger._session - session2 = client2.transport.delete_trigger._session - assert session1 != session2 -def test_eventarc_grpc_transport_channel(): - channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy_pb2.Policy() + ) - # Check that channel is used if provided. - transport = transports.EventarcGrpcTransport( - host="squid.clam.whelk", - channel=channel, + response = await client.set_iam_policy( + request={ + "resource": "resource_value", + "policy": policy_pb2.Policy(version=774), + } + ) + call.assert_called() + +def test_get_iam_policy(transport: str = "grpc"): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.GetIamPolicyRequest() -def test_eventarc_grpc_asyncio_transport_channel(): - channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy(version=774, etag=b"etag_blob",) - # Check that channel is used if provided. - transport = transports.EventarcGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, + response = client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, policy_pb2.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +@pytest.mark.asyncio +async def test_get_iam_policy_async(transport: str = "grpc_asyncio"): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.GetIamPolicyRequest() -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize("transport_class", [transports.EventarcGrpcTransport, transports.EventarcGrpcAsyncIOTransport]) -def test_eventarc_transport_channel_mtls_with_client_cert_source( - transport_class -): - with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_iam_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy_pb2.Policy(version=774, etag=b"etag_blob",) + ) - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel + response = await client.get_iam_policy(request) - cred = ga_credentials.AnonymousCredentials() - with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (cred, None) - transport = transport_class( - host="squid.clam.whelk", - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - adc.assert_called_once() + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - assert transport._ssl_channel_credentials == mock_ssl_cred + assert args[0] == request + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. 
-@pytest.mark.parametrize("transport_class", [transports.EventarcGrpcTransport, transports.EventarcGrpcAsyncIOTransport]) -def test_eventarc_transport_channel_mtls_with_adc( - transport_class -): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() + assert response.version == 774 - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) + assert response.etag == b"etag_blob" - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel +def test_get_iam_policy_field_headers(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.GetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + call.return_value = policy_pb2.Policy() -def test_eventarc_grpc_lro_client(): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - transport = client.transport + client.get_iam_policy(request) - # Ensure that we have a api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.OperationsClient, - ) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request - # Ensure that subsequent calls to the property send the exact same object. - assert transport.operations_client is transport.operations_client + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] -def test_eventarc_grpc_lro_async_client(): +@pytest.mark.asyncio +async def test_get_iam_policy_field_headers_async(): client = EventarcAsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc_asyncio', ) - transport = client.transport - # Ensure that we have a api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.OperationsAsyncClient, - ) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.GetIamPolicyRequest() + request.resource = "resource/value" - # Ensure that subsequent calls to the property send the exact same object. - assert transport.operations_client is transport.operations_client + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_iam_policy), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + await client.get_iam_policy(request) -def test_service_path(): - expected = "*".format() - actual = EventarcClient.service_path() - assert expected == actual + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] -def test_parse_service_path(): - expected = { - } - path = EventarcClient.service_path(**expected) - # Check that the path construction is reversible. - actual = EventarcClient.parse_service_path(path) - assert expected == actual +def test_get_iam_policy_from_dict(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() -def test_service_account_path(): - project = "squid" - service_account = "clam" - expected = "projects/{project}/serviceAccounts/{service_account}".format(project=project, service_account=service_account, ) - actual = EventarcClient.service_account_path(project, service_account) - assert expected == actual + response = client.get_iam_policy( + request={ + "resource": "resource_value", + "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_get_iam_policy_from_dict_async(): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy_pb2.Policy() + ) -def test_parse_service_account_path(): - expected = { - "project": "whelk", - "service_account": "octopus", - } - path = EventarcClient.service_account_path(**expected) + response = await client.get_iam_policy( + request={ + "resource": "resource_value", + "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), + } + ) + call.assert_called() - # Check that the path construction is reversible. - actual = EventarcClient.parse_service_account_path(path) - assert expected == actual +def test_test_iam_permissions(transport: str = "grpc"): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) -def test_trigger_path(): - project = "oyster" - location = "nudibranch" - trigger = "cuttlefish" - expected = "projects/{project}/locations/{location}/triggers/{trigger}".format(project=project, location=location, trigger=trigger, ) - actual = EventarcClient.trigger_path(project, location, trigger) - assert expected == actual + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.TestIamPermissionsRequest() + # Mock the actual call within the gRPC stub, and fake the request. 
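+    # TestIamPermissions reports back the subset of the requested permissions
+    # that the caller holds; the mocked response below fakes that shape.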
+ with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy_pb2.TestIamPermissionsResponse( + permissions=["permissions_value"], + ) -def test_parse_trigger_path(): - expected = { - "project": "mussel", - "location": "winkle", - "trigger": "nautilus", - } - path = EventarcClient.trigger_path(**expected) + response = client.test_iam_permissions(request) - # Check that the path construction is reversible. - actual = EventarcClient.parse_trigger_path(path) - assert expected == actual + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] -def test_common_billing_account_path(): - billing_account = "scallop" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - actual = EventarcClient.common_billing_account_path(billing_account) - assert expected == actual + assert args[0] == request + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "abalone", - } - path = EventarcClient.common_billing_account_path(**expected) + assert response.permissions == ["permissions_value"] - # Check that the path construction is reversible. - actual = EventarcClient.parse_common_billing_account_path(path) - assert expected == actual -def test_common_folder_path(): - folder = "squid" - expected = "folders/{folder}".format(folder=folder, ) - actual = EventarcClient.common_folder_path(folder) - assert expected == actual +@pytest.mark.asyncio +async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.TestIamPermissionsRequest() -def test_parse_common_folder_path(): - expected = { - "folder": "clam", - } - path = EventarcClient.common_folder_path(**expected) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse(permissions=["permissions_value"],) + ) - # Check that the path construction is reversible. - actual = EventarcClient.parse_common_folder_path(path) - assert expected == actual + response = await client.test_iam_permissions(request) -def test_common_organization_path(): - organization = "whelk" - expected = "organizations/{organization}".format(organization=organization, ) - actual = EventarcClient.common_organization_path(organization) - assert expected == actual + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request -def test_parse_common_organization_path(): - expected = { - "organization": "octopus", - } - path = EventarcClient.common_organization_path(**expected) + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) - # Check that the path construction is reversible. 
- actual = EventarcClient.parse_common_organization_path(path) - assert expected == actual + assert response.permissions == ["permissions_value"] -def test_common_project_path(): - project = "oyster" - expected = "projects/{project}".format(project=project, ) - actual = EventarcClient.common_project_path(project) - assert expected == actual +def test_test_iam_permissions_field_headers(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) -def test_parse_common_project_path(): - expected = { - "project": "nudibranch", - } - path = EventarcClient.common_project_path(**expected) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.TestIamPermissionsRequest() + request.resource = "resource/value" - # Check that the path construction is reversible. - actual = EventarcClient.parse_common_project_path(path) - assert expected == actual + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() -def test_common_location_path(): - project = "cuttlefish" - location = "mussel" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) - actual = EventarcClient.common_location_path(project, location) - assert expected == actual + client.test_iam_permissions(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request -def test_parse_common_location_path(): - expected = { - "project": "winkle", - "location": "nautilus", - } - path = EventarcClient.common_location_path(**expected) + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] - # Check that the path construction is reversible. - actual = EventarcClient.parse_common_location_path(path) - assert expected == actual +@pytest.mark.asyncio +async def test_test_iam_permissions_field_headers_async(): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.TestIamPermissionsRequest() + request.resource = "resource/value" - with mock.patch.object(transports.EventarcTransport, '_prep_wrapped_messages') as prep: - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse() ) - prep.assert_called_once_with(client_info) - with mock.patch.object(transports.EventarcTransport, '_prep_wrapped_messages') as prep: - transport_class = EventarcClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, + await client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. 
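+    # (A bare len() check only verifies the stub was called at least once.)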
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + + +def test_test_iam_permissions_from_dict(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + + response = client.test_iam_permissions( + request={ + "resource": "resource_value", + "permissions": ["permissions_value"], + } ) - prep.assert_called_once_with(client_info) + call.assert_called() @pytest.mark.asyncio -async def test_transport_close_async(): +async def test_test_iam_permissions_from_dict_async(): client = EventarcAsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", ) - with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: - async with client: - close.assert_not_called() - close.assert_called_once() + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse() + ) + response = await client.test_iam_permissions( + request={ + "resource": "resource_value", + "permissions": ["permissions_value"], + } + ) + call.assert_called() def test_transport_close(): transports = { diff --git a/tests/integration/goldens/logging/docs/_static/custom.css b/tests/integration/goldens/logging/docs/_static/custom.css new file mode 100755 index 0000000000..06423be0b5 --- /dev/null +++ b/tests/integration/goldens/logging/docs/_static/custom.css @@ -0,0 +1,3 @@ +dl.field-list > dt { + min-width: 100px +} diff --git a/tests/integration/goldens/logging/docs/conf.py b/tests/integration/goldens/logging/docs/conf.py index 6291cb766d..c78410beee 100755 --- a/tests/integration/goldens/logging/docs/conf.py +++ b/tests/integration/goldens/logging/docs/conf.py @@ -96,7 +96,7 @@ # # This is also used if you do content translation via gettext catalogs. # Usually you set "language" from the command line for these cases. 
-language = None +language = 'en' # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: diff --git a/tests/integration/goldens/logging/google/cloud/logging/__init__.py b/tests/integration/goldens/logging/google/cloud/logging/__init__.py index d8b1e1bfee..fb661a83ae 100755 --- a/tests/integration/goldens/logging/google/cloud/logging/__init__.py +++ b/tests/integration/goldens/logging/google/cloud/logging/__init__.py @@ -28,6 +28,7 @@ from google.cloud.logging_v2.types.log_entry import LogEntry from google.cloud.logging_v2.types.log_entry import LogEntryOperation from google.cloud.logging_v2.types.log_entry import LogEntrySourceLocation +from google.cloud.logging_v2.types.log_entry import LogSplit from google.cloud.logging_v2.types.logging import DeleteLogRequest from google.cloud.logging_v2.types.logging import ListLogEntriesRequest from google.cloud.logging_v2.types.logging import ListLogEntriesResponse @@ -40,40 +41,59 @@ from google.cloud.logging_v2.types.logging import WriteLogEntriesPartialErrors from google.cloud.logging_v2.types.logging import WriteLogEntriesRequest from google.cloud.logging_v2.types.logging import WriteLogEntriesResponse +from google.cloud.logging_v2.types.logging_config import BigQueryDataset from google.cloud.logging_v2.types.logging_config import BigQueryOptions +from google.cloud.logging_v2.types.logging_config import BucketMetadata from google.cloud.logging_v2.types.logging_config import CmekSettings +from google.cloud.logging_v2.types.logging_config import CopyLogEntriesMetadata +from google.cloud.logging_v2.types.logging_config import CopyLogEntriesRequest +from google.cloud.logging_v2.types.logging_config import CopyLogEntriesResponse from google.cloud.logging_v2.types.logging_config import CreateBucketRequest from google.cloud.logging_v2.types.logging_config import CreateExclusionRequest +from google.cloud.logging_v2.types.logging_config import CreateLinkRequest from google.cloud.logging_v2.types.logging_config import CreateSinkRequest from google.cloud.logging_v2.types.logging_config import CreateViewRequest from google.cloud.logging_v2.types.logging_config import DeleteBucketRequest from google.cloud.logging_v2.types.logging_config import DeleteExclusionRequest +from google.cloud.logging_v2.types.logging_config import DeleteLinkRequest from google.cloud.logging_v2.types.logging_config import DeleteSinkRequest from google.cloud.logging_v2.types.logging_config import DeleteViewRequest from google.cloud.logging_v2.types.logging_config import GetBucketRequest from google.cloud.logging_v2.types.logging_config import GetCmekSettingsRequest from google.cloud.logging_v2.types.logging_config import GetExclusionRequest +from google.cloud.logging_v2.types.logging_config import GetLinkRequest +from google.cloud.logging_v2.types.logging_config import GetSettingsRequest from google.cloud.logging_v2.types.logging_config import GetSinkRequest from google.cloud.logging_v2.types.logging_config import GetViewRequest +from google.cloud.logging_v2.types.logging_config import IndexConfig +from google.cloud.logging_v2.types.logging_config import Link +from google.cloud.logging_v2.types.logging_config import LinkMetadata from google.cloud.logging_v2.types.logging_config import ListBucketsRequest from google.cloud.logging_v2.types.logging_config import ListBucketsResponse from google.cloud.logging_v2.types.logging_config import ListExclusionsRequest from google.cloud.logging_v2.types.logging_config import 
ListExclusionsResponse +from google.cloud.logging_v2.types.logging_config import ListLinksRequest +from google.cloud.logging_v2.types.logging_config import ListLinksResponse from google.cloud.logging_v2.types.logging_config import ListSinksRequest from google.cloud.logging_v2.types.logging_config import ListSinksResponse from google.cloud.logging_v2.types.logging_config import ListViewsRequest from google.cloud.logging_v2.types.logging_config import ListViewsResponse +from google.cloud.logging_v2.types.logging_config import LocationMetadata from google.cloud.logging_v2.types.logging_config import LogBucket from google.cloud.logging_v2.types.logging_config import LogExclusion from google.cloud.logging_v2.types.logging_config import LogSink from google.cloud.logging_v2.types.logging_config import LogView +from google.cloud.logging_v2.types.logging_config import Settings from google.cloud.logging_v2.types.logging_config import UndeleteBucketRequest from google.cloud.logging_v2.types.logging_config import UpdateBucketRequest from google.cloud.logging_v2.types.logging_config import UpdateCmekSettingsRequest from google.cloud.logging_v2.types.logging_config import UpdateExclusionRequest +from google.cloud.logging_v2.types.logging_config import UpdateSettingsRequest from google.cloud.logging_v2.types.logging_config import UpdateSinkRequest from google.cloud.logging_v2.types.logging_config import UpdateViewRequest +from google.cloud.logging_v2.types.logging_config import IndexType from google.cloud.logging_v2.types.logging_config import LifecycleState +from google.cloud.logging_v2.types.logging_config import OperationState from google.cloud.logging_v2.types.logging_metrics import CreateLogMetricRequest from google.cloud.logging_v2.types.logging_metrics import DeleteLogMetricRequest from google.cloud.logging_v2.types.logging_metrics import GetLogMetricRequest @@ -91,6 +111,7 @@ 'LogEntry', 'LogEntryOperation', 'LogEntrySourceLocation', + 'LogSplit', 'DeleteLogRequest', 'ListLogEntriesRequest', 'ListLogEntriesResponse', @@ -103,40 +124,59 @@ 'WriteLogEntriesPartialErrors', 'WriteLogEntriesRequest', 'WriteLogEntriesResponse', + 'BigQueryDataset', 'BigQueryOptions', + 'BucketMetadata', 'CmekSettings', + 'CopyLogEntriesMetadata', + 'CopyLogEntriesRequest', + 'CopyLogEntriesResponse', 'CreateBucketRequest', 'CreateExclusionRequest', + 'CreateLinkRequest', 'CreateSinkRequest', 'CreateViewRequest', 'DeleteBucketRequest', 'DeleteExclusionRequest', + 'DeleteLinkRequest', 'DeleteSinkRequest', 'DeleteViewRequest', 'GetBucketRequest', 'GetCmekSettingsRequest', 'GetExclusionRequest', + 'GetLinkRequest', + 'GetSettingsRequest', 'GetSinkRequest', 'GetViewRequest', + 'IndexConfig', + 'Link', + 'LinkMetadata', 'ListBucketsRequest', 'ListBucketsResponse', 'ListExclusionsRequest', 'ListExclusionsResponse', + 'ListLinksRequest', + 'ListLinksResponse', 'ListSinksRequest', 'ListSinksResponse', 'ListViewsRequest', 'ListViewsResponse', + 'LocationMetadata', 'LogBucket', 'LogExclusion', 'LogSink', 'LogView', + 'Settings', 'UndeleteBucketRequest', 'UpdateBucketRequest', 'UpdateCmekSettingsRequest', 'UpdateExclusionRequest', + 'UpdateSettingsRequest', 'UpdateSinkRequest', 'UpdateViewRequest', + 'IndexType', 'LifecycleState', + 'OperationState', 'CreateLogMetricRequest', 'DeleteLogMetricRequest', 'GetLogMetricRequest', diff --git a/tests/integration/goldens/logging/google/cloud/logging_v2/__init__.py b/tests/integration/goldens/logging/google/cloud/logging_v2/__init__.py index c40cb2b0d9..b6bfee061f 100755 --- 
a/tests/integration/goldens/logging/google/cloud/logging_v2/__init__.py +++ b/tests/integration/goldens/logging/google/cloud/logging_v2/__init__.py @@ -28,6 +28,7 @@ from .types.log_entry import LogEntry from .types.log_entry import LogEntryOperation from .types.log_entry import LogEntrySourceLocation +from .types.log_entry import LogSplit from .types.logging import DeleteLogRequest from .types.logging import ListLogEntriesRequest from .types.logging import ListLogEntriesResponse @@ -40,40 +41,59 @@ from .types.logging import WriteLogEntriesPartialErrors from .types.logging import WriteLogEntriesRequest from .types.logging import WriteLogEntriesResponse +from .types.logging_config import BigQueryDataset from .types.logging_config import BigQueryOptions +from .types.logging_config import BucketMetadata from .types.logging_config import CmekSettings +from .types.logging_config import CopyLogEntriesMetadata +from .types.logging_config import CopyLogEntriesRequest +from .types.logging_config import CopyLogEntriesResponse from .types.logging_config import CreateBucketRequest from .types.logging_config import CreateExclusionRequest +from .types.logging_config import CreateLinkRequest from .types.logging_config import CreateSinkRequest from .types.logging_config import CreateViewRequest from .types.logging_config import DeleteBucketRequest from .types.logging_config import DeleteExclusionRequest +from .types.logging_config import DeleteLinkRequest from .types.logging_config import DeleteSinkRequest from .types.logging_config import DeleteViewRequest from .types.logging_config import GetBucketRequest from .types.logging_config import GetCmekSettingsRequest from .types.logging_config import GetExclusionRequest +from .types.logging_config import GetLinkRequest +from .types.logging_config import GetSettingsRequest from .types.logging_config import GetSinkRequest from .types.logging_config import GetViewRequest +from .types.logging_config import IndexConfig +from .types.logging_config import Link +from .types.logging_config import LinkMetadata from .types.logging_config import ListBucketsRequest from .types.logging_config import ListBucketsResponse from .types.logging_config import ListExclusionsRequest from .types.logging_config import ListExclusionsResponse +from .types.logging_config import ListLinksRequest +from .types.logging_config import ListLinksResponse from .types.logging_config import ListSinksRequest from .types.logging_config import ListSinksResponse from .types.logging_config import ListViewsRequest from .types.logging_config import ListViewsResponse +from .types.logging_config import LocationMetadata from .types.logging_config import LogBucket from .types.logging_config import LogExclusion from .types.logging_config import LogSink from .types.logging_config import LogView +from .types.logging_config import Settings from .types.logging_config import UndeleteBucketRequest from .types.logging_config import UpdateBucketRequest from .types.logging_config import UpdateCmekSettingsRequest from .types.logging_config import UpdateExclusionRequest +from .types.logging_config import UpdateSettingsRequest from .types.logging_config import UpdateSinkRequest from .types.logging_config import UpdateViewRequest +from .types.logging_config import IndexType from .types.logging_config import LifecycleState +from .types.logging_config import OperationState from .types.logging_metrics import CreateLogMetricRequest from .types.logging_metrics import DeleteLogMetricRequest from .types.logging_metrics import 
GetLogMetricRequest @@ -86,16 +106,23 @@ 'ConfigServiceV2AsyncClient', 'LoggingServiceV2AsyncClient', 'MetricsServiceV2AsyncClient', +'BigQueryDataset', 'BigQueryOptions', +'BucketMetadata', 'CmekSettings', 'ConfigServiceV2Client', +'CopyLogEntriesMetadata', +'CopyLogEntriesRequest', +'CopyLogEntriesResponse', 'CreateBucketRequest', 'CreateExclusionRequest', +'CreateLinkRequest', 'CreateLogMetricRequest', 'CreateSinkRequest', 'CreateViewRequest', 'DeleteBucketRequest', 'DeleteExclusionRequest', +'DeleteLinkRequest', 'DeleteLogMetricRequest', 'DeleteLogRequest', 'DeleteSinkRequest', @@ -103,14 +130,22 @@ 'GetBucketRequest', 'GetCmekSettingsRequest', 'GetExclusionRequest', +'GetLinkRequest', 'GetLogMetricRequest', +'GetSettingsRequest', 'GetSinkRequest', 'GetViewRequest', +'IndexConfig', +'IndexType', 'LifecycleState', +'Link', +'LinkMetadata', 'ListBucketsRequest', 'ListBucketsResponse', 'ListExclusionsRequest', 'ListExclusionsResponse', +'ListLinksRequest', +'ListLinksResponse', 'ListLogEntriesRequest', 'ListLogEntriesResponse', 'ListLogMetricsRequest', @@ -123,6 +158,7 @@ 'ListSinksResponse', 'ListViewsRequest', 'ListViewsResponse', +'LocationMetadata', 'LogBucket', 'LogEntry', 'LogEntryOperation', @@ -130,9 +166,12 @@ 'LogExclusion', 'LogMetric', 'LogSink', +'LogSplit', 'LogView', 'LoggingServiceV2Client', 'MetricsServiceV2Client', +'OperationState', +'Settings', 'TailLogEntriesRequest', 'TailLogEntriesResponse', 'UndeleteBucketRequest', @@ -140,6 +179,7 @@ 'UpdateCmekSettingsRequest', 'UpdateExclusionRequest', 'UpdateLogMetricRequest', +'UpdateSettingsRequest', 'UpdateSinkRequest', 'UpdateViewRequest', 'WriteLogEntriesPartialErrors', diff --git a/tests/integration/goldens/logging/google/cloud/logging_v2/gapic_metadata.json b/tests/integration/goldens/logging/google/cloud/logging_v2/gapic_metadata.json index 69112af60b..8d2b1297a0 100755 --- a/tests/integration/goldens/logging/google/cloud/logging_v2/gapic_metadata.json +++ b/tests/integration/goldens/logging/google/cloud/logging_v2/gapic_metadata.json @@ -10,16 +10,31 @@ "grpc": { "libraryClient": "ConfigServiceV2Client", "rpcs": { + "CopyLogEntries": { + "methods": [ + "copy_log_entries" + ] + }, "CreateBucket": { "methods": [ "create_bucket" ] }, + "CreateBucketAsync": { + "methods": [ + "create_bucket_async" + ] + }, "CreateExclusion": { "methods": [ "create_exclusion" ] }, + "CreateLink": { + "methods": [ + "create_link" + ] + }, "CreateSink": { "methods": [ "create_sink" @@ -40,6 +55,11 @@ "delete_exclusion" ] }, + "DeleteLink": { + "methods": [ + "delete_link" + ] + }, "DeleteSink": { "methods": [ "delete_sink" @@ -65,6 +85,16 @@ "get_exclusion" ] }, + "GetLink": { + "methods": [ + "get_link" + ] + }, + "GetSettings": { + "methods": [ + "get_settings" + ] + }, "GetSink": { "methods": [ "get_sink" @@ -85,6 +115,11 @@ "list_exclusions" ] }, + "ListLinks": { + "methods": [ + "list_links" + ] + }, "ListSinks": { "methods": [ "list_sinks" @@ -105,6 +140,11 @@ "update_bucket" ] }, + "UpdateBucketAsync": { + "methods": [ + "update_bucket_async" + ] + }, "UpdateCmekSettings": { "methods": [ "update_cmek_settings" @@ -115,6 +155,11 @@ "update_exclusion" ] }, + "UpdateSettings": { + "methods": [ + "update_settings" + ] + }, "UpdateSink": { "methods": [ "update_sink" @@ -130,16 +175,31 @@ "grpc-async": { "libraryClient": "ConfigServiceV2AsyncClient", "rpcs": { + "CopyLogEntries": { + "methods": [ + "copy_log_entries" + ] + }, "CreateBucket": { "methods": [ "create_bucket" ] }, + "CreateBucketAsync": { + "methods": [ + "create_bucket_async" + 
] + }, "CreateExclusion": { "methods": [ "create_exclusion" ] }, + "CreateLink": { + "methods": [ + "create_link" + ] + }, "CreateSink": { "methods": [ "create_sink" @@ -160,6 +220,11 @@ "delete_exclusion" ] }, + "DeleteLink": { + "methods": [ + "delete_link" + ] + }, "DeleteSink": { "methods": [ "delete_sink" @@ -185,124 +250,14 @@ "get_exclusion" ] }, - "GetSink": { - "methods": [ - "get_sink" - ] - }, - "GetView": { - "methods": [ - "get_view" - ] - }, - "ListBuckets": { - "methods": [ - "list_buckets" - ] - }, - "ListExclusions": { - "methods": [ - "list_exclusions" - ] - }, - "ListSinks": { - "methods": [ - "list_sinks" - ] - }, - "ListViews": { - "methods": [ - "list_views" - ] - }, - "UndeleteBucket": { - "methods": [ - "undelete_bucket" - ] - }, - "UpdateBucket": { - "methods": [ - "update_bucket" - ] - }, - "UpdateCmekSettings": { - "methods": [ - "update_cmek_settings" - ] - }, - "UpdateExclusion": { - "methods": [ - "update_exclusion" - ] - }, - "UpdateSink": { - "methods": [ - "update_sink" - ] - }, - "UpdateView": { - "methods": [ - "update_view" - ] - } - } - }, - "rest": { - "libraryClient": "ConfigServiceV2Client", - "rpcs": { - "CreateBucket": { - "methods": [ - "create_bucket" - ] - }, - "CreateExclusion": { - "methods": [ - "create_exclusion" - ] - }, - "CreateSink": { - "methods": [ - "create_sink" - ] - }, - "CreateView": { - "methods": [ - "create_view" - ] - }, - "DeleteBucket": { - "methods": [ - "delete_bucket" - ] - }, - "DeleteExclusion": { - "methods": [ - "delete_exclusion" - ] - }, - "DeleteSink": { - "methods": [ - "delete_sink" - ] - }, - "DeleteView": { - "methods": [ - "delete_view" - ] - }, - "GetBucket": { - "methods": [ - "get_bucket" - ] - }, - "GetCmekSettings": { + "GetLink": { "methods": [ - "get_cmek_settings" + "get_link" ] }, - "GetExclusion": { + "GetSettings": { "methods": [ - "get_exclusion" + "get_settings" ] }, "GetSink": { @@ -325,6 +280,11 @@ "list_exclusions" ] }, + "ListLinks": { + "methods": [ + "list_links" + ] + }, "ListSinks": { "methods": [ "list_sinks" @@ -345,6 +305,11 @@ "update_bucket" ] }, + "UpdateBucketAsync": { + "methods": [ + "update_bucket_async" + ] + }, "UpdateCmekSettings": { "methods": [ "update_cmek_settings" @@ -355,6 +320,11 @@ "update_exclusion" ] }, + "UpdateSettings": { + "methods": [ + "update_settings" + ] + }, "UpdateSink": { "methods": [ "update_sink" @@ -440,41 +410,6 @@ ] } } - }, - "rest": { - "libraryClient": "LoggingServiceV2Client", - "rpcs": { - "DeleteLog": { - "methods": [ - "delete_log" - ] - }, - "ListLogEntries": { - "methods": [ - "list_log_entries" - ] - }, - "ListLogs": { - "methods": [ - "list_logs" - ] - }, - "ListMonitoredResourceDescriptors": { - "methods": [ - "list_monitored_resource_descriptors" - ] - }, - "TailLogEntries": { - "methods": [ - "tail_log_entries" - ] - }, - "WriteLogEntries": { - "methods": [ - "write_log_entries" - ] - } - } } } }, @@ -539,36 +474,6 @@ ] } } - }, - "rest": { - "libraryClient": "MetricsServiceV2Client", - "rpcs": { - "CreateLogMetric": { - "methods": [ - "create_log_metric" - ] - }, - "DeleteLogMetric": { - "methods": [ - "delete_log_metric" - ] - }, - "GetLogMetric": { - "methods": [ - "get_log_metric" - ] - }, - "ListLogMetrics": { - "methods": [ - "list_log_metrics" - ] - }, - "UpdateLogMetric": { - "methods": [ - "update_log_metric" - ] - } - } } } } diff --git a/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py 
b/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py index 746d2e4878..1c9d959104 100755 --- a/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py +++ b/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py @@ -32,8 +32,12 @@ except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object] # type: ignore +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore from google.cloud.logging_v2.services.config_service_v2 import pagers from google.cloud.logging_v2.types import logging_config +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore from .transports.base import ConfigServiceV2Transport, DEFAULT_CLIENT_INFO @@ -51,6 +55,8 @@ class ConfigServiceV2AsyncClient: cmek_settings_path = staticmethod(ConfigServiceV2Client.cmek_settings_path) parse_cmek_settings_path = staticmethod(ConfigServiceV2Client.parse_cmek_settings_path) + link_path = staticmethod(ConfigServiceV2Client.link_path) + parse_link_path = staticmethod(ConfigServiceV2Client.parse_link_path) log_bucket_path = staticmethod(ConfigServiceV2Client.log_bucket_path) parse_log_bucket_path = staticmethod(ConfigServiceV2Client.parse_log_bucket_path) log_exclusion_path = staticmethod(ConfigServiceV2Client.log_exclusion_path) @@ -59,6 +65,8 @@ class ConfigServiceV2AsyncClient: parse_log_sink_path = staticmethod(ConfigServiceV2Client.parse_log_sink_path) log_view_path = staticmethod(ConfigServiceV2Client.log_view_path) parse_log_view_path = staticmethod(ConfigServiceV2Client.parse_log_view_path) + settings_path = staticmethod(ConfigServiceV2Client.settings_path) + parse_settings_path = staticmethod(ConfigServiceV2Client.parse_settings_path) common_billing_account_path = staticmethod(ConfigServiceV2Client.common_billing_account_path) parse_common_billing_account_path = staticmethod(ConfigServiceV2Client.parse_common_billing_account_path) common_folder_path = staticmethod(ConfigServiceV2Client.common_folder_path) @@ -202,7 +210,7 @@ async def list_buckets(self, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListBucketsAsyncPager: - r"""Lists buckets. + r"""Lists log buckets. .. code-block:: python @@ -261,6 +269,7 @@ async def sample_list_buckets(): Returns: google.cloud.logging_v2.services.config_service_v2.pagers.ListBucketsAsyncPager: The response from ListBuckets. + Iterating over this object will yield results and resolve additional pages automatically. @@ -324,7 +333,7 @@ async def get_bucket(self, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogBucket: - r"""Gets a bucket. + r"""Gets a log bucket. .. code-block:: python @@ -363,7 +372,9 @@ async def sample_get_bucket(): Returns: google.cloud.logging_v2.types.LogBucket: - Describes a repository of logs. + Describes a repository in which log + entries are stored. + """ # Create or coerce a protobuf request object. request = logging_config.GetBucketRequest(request) @@ -395,6 +406,206 @@ async def sample_get_bucket(): # Done; return the response. 
return response + async def create_bucket_async(self, + request: Optional[Union[logging_config.CreateBucketRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a log bucket asynchronously that can be used + to store log entries. + After a bucket has been created, the bucket's location + cannot be changed. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import logging_v2 + + async def sample_create_bucket_async(): + # Create a client + client = logging_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.CreateBucketRequest( + parent="parent_value", + bucket_id="bucket_id_value", + ) + + # Make the request + operation = client.create_bucket_async(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.logging_v2.types.CreateBucketRequest, dict]]): + The request object. The parameters to ``CreateBucket``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.logging_v2.types.LogBucket` + Describes a repository in which log entries are stored. + + """ + # Create or coerce a protobuf request object. + request = logging_config.CreateBucketRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_bucket_async, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + logging_config.LogBucket, + metadata_type=logging_config.BucketMetadata, + ) + + # Done; return the response. + return response + + async def update_bucket_async(self, + request: Optional[Union[logging_config.UpdateBucketRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates a log bucket asynchronously. + + If the bucket has a ``lifecycle_state`` of ``DELETE_REQUESTED``, + then ``FAILED_PRECONDITION`` will be returned. 
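A minimal usage sketch of the new long-running create_bucket_async method added above, assuming application-default credentials; the project, location, and bucket IDs below are placeholders, not values from this diff.

.. code-block:: python

    from google.cloud import logging_v2

    async def create_bucket_with_lro() -> None:
        client = logging_v2.ConfigServiceV2AsyncClient()
        request = logging_v2.CreateBucketRequest(
            parent="projects/my-project/locations/global",  # hypothetical project
            bucket_id="my-bucket",                          # hypothetical bucket ID
        )
        # Awaiting the call yields an AsyncOperation; awaiting .result() then
        # yields the finished LogBucket (operation metadata is BucketMetadata).
        operation = await client.create_bucket_async(request=request)
        bucket = await operation.result()
        print(bucket.name)

The same operation-future pattern applies to update_bucket_async below; only the request type and routing header differ.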
+ + After a bucket has been created, the bucket's location cannot be + changed. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import logging_v2 + + async def sample_update_bucket_async(): + # Create a client + client = logging_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.UpdateBucketRequest( + name="name_value", + ) + + # Make the request + operation = client.update_bucket_async(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.logging_v2.types.UpdateBucketRequest, dict]]): + The request object. The parameters to ``UpdateBucket``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.logging_v2.types.LogBucket` + Describes a repository in which log entries are stored. + + """ + # Create or coerce a protobuf request object. + request = logging_config.UpdateBucketRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_bucket_async, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + logging_config.LogBucket, + metadata_type=logging_config.BucketMetadata, + ) + + # Done; return the response. + return response + async def create_bucket(self, request: Optional[Union[logging_config.CreateBucketRequest, dict]] = None, *, @@ -402,9 +613,9 @@ async def create_bucket(self, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogBucket: - r"""Creates a bucket that can be used to store log - entries. Once a bucket has been created, the region - cannot be changed. + r"""Creates a log bucket that can be used to store log + entries. After a bucket has been created, the bucket's + location cannot be changed. .. code-block:: python @@ -444,7 +655,9 @@ async def sample_create_bucket(): Returns: google.cloud.logging_v2.types.LogBucket: - Describes a repository of logs. + Describes a repository in which log + entries are stored. + """ # Create or coerce a protobuf request object. 
request = logging_config.CreateBucketRequest(request) @@ -483,17 +696,13 @@ async def update_bucket(self, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogBucket: - r"""Updates a bucket. This method replaces the following fields in - the existing bucket with values from the new bucket: - ``retention_period`` + r"""Updates a log bucket. - If the retention period is decreased and the bucket is locked, - FAILED_PRECONDITION will be returned. + If the bucket has a ``lifecycle_state`` of ``DELETE_REQUESTED``, + then ``FAILED_PRECONDITION`` will be returned. - If the bucket has a LifecycleState of DELETE_REQUESTED, - FAILED_PRECONDITION will be returned. - - A buckets region may not be modified after it is created. + After a bucket has been created, the bucket's location cannot be + changed. .. code-block:: python @@ -532,7 +741,9 @@ async def sample_update_bucket(): Returns: google.cloud.logging_v2.types.LogBucket: - Describes a repository of logs. + Describes a repository in which log + entries are stored. + """ # Create or coerce a protobuf request object. request = logging_config.UpdateBucketRequest(request) @@ -571,9 +782,12 @@ async def delete_bucket(self, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: - r"""Deletes a bucket. Moves the bucket to the DELETE_REQUESTED - state. After 7 days, the bucket will be purged and all logs in - the bucket will be permanently deleted. + r"""Deletes a log bucket. + + Changes the bucket's ``lifecycle_state`` to the + ``DELETE_REQUESTED`` state. After 7 days, the bucket will be + purged and all log entries in the bucket will be permanently + deleted. .. code-block:: python @@ -641,8 +855,9 @@ async def undelete_bucket(self, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: - r"""Undeletes a bucket. A bucket that has been deleted - may be undeleted within the grace period of 7 days. + r"""Undeletes a log bucket. A bucket that has been + deleted can be undeleted within the grace period of 7 + days. .. code-block:: python @@ -711,7 +926,7 @@ async def list_views(self, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListViewsAsyncPager: - r"""Lists views on a bucket. + r"""Lists views on a log bucket. .. code-block:: python @@ -762,6 +977,7 @@ async def sample_list_views(): Returns: google.cloud.logging_v2.services.config_service_v2.pagers.ListViewsAsyncPager: The response from ListViews. + Iterating over this object will yield results and resolve additional pages automatically. @@ -825,7 +1041,7 @@ async def get_view(self, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogView: - r"""Gets a view. + r"""Gets a view on a log bucket.. .. code-block:: python @@ -864,8 +1080,8 @@ async def sample_get_view(): Returns: google.cloud.logging_v2.types.LogView: - Describes a view over logs in a - bucket. + Describes a view over log entries in + a bucket. """ # Create or coerce a protobuf request object. @@ -905,8 +1121,8 @@ async def create_view(self, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogView: - r"""Creates a view over logs in a bucket. A bucket may - contain a maximum of 50 views. + r"""Creates a view over log entries in a log bucket. 
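The view methods touched in the following hunks keep their flattened signatures; only the descriptions and the 30-view limit change. A hedged sketch of creating a view over log entries, with placeholder resource names and an assumed example filter:

.. code-block:: python

    from google.cloud import logging_v2

    async def create_view_sketch() -> None:
        client = logging_v2.ConfigServiceV2AsyncClient()
        view = await client.create_view(request=logging_v2.CreateViewRequest(
            parent="projects/my-project/locations/global/buckets/my-bucket",
            view_id="my-view",  # hypothetical view ID
            view=logging_v2.LogView(filter='resource.type="gce_instance"'),
        ))
        print(view.name)  # a LogView describing a view over log entries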
A + bucket may contain a maximum of 30 views. .. code-block:: python @@ -946,8 +1162,8 @@ async def sample_create_view(): Returns: google.cloud.logging_v2.types.LogView: - Describes a view over logs in a - bucket. + Describes a view over log entries in + a bucket. """ # Create or coerce a protobuf request object. @@ -987,8 +1203,11 @@ async def update_view(self, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogView: - r"""Updates a view. This method replaces the following fields in the - existing view with values from the new view: ``filter``. + r"""Updates a view on a log bucket. This method replaces the + following fields in the existing view with values from the new + view: ``filter``. If an ``UNAVAILABLE`` error is returned, this + indicates that system is not in a state where it can update the + view. If this occurs, please try again in a few minutes. .. code-block:: python @@ -1027,8 +1246,8 @@ async def sample_update_view(): Returns: google.cloud.logging_v2.types.LogView: - Describes a view over logs in a - bucket. + Describes a view over log entries in + a bucket. """ # Create or coerce a protobuf request object. @@ -1068,7 +1287,10 @@ async def delete_view(self, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: - r"""Deletes a view from a bucket. + r"""Deletes a view on a log bucket. If an ``UNAVAILABLE`` error is + returned, this indicates that system is not in a state where it + can delete the view. If this occurs, please try again in a few + minutes. .. code-block:: python @@ -1305,7 +1527,9 @@ async def sample_get_sink(): "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" "folders/[FOLDER_ID]/sinks/[SINK_ID]" - Example: ``"projects/my-project-id/sinks/my-sink-id"``. + For example: + + ``"projects/my-project/sinks/my-sink"`` This corresponds to the ``sink_name`` field on the ``request`` instance; if ``request`` is provided, this @@ -1321,12 +1545,12 @@ async def sample_get_sink(): Describes a sink used to export log entries to one of the following destinations in any project: a Cloud - Storage bucket, a BigQuery dataset, or a - Cloud Pub/Sub topic. A logs filter - controls which log entries are exported. - The sink must be created within a - project, organization, billing account, - or folder. + Storage bucket, a BigQuery dataset, a + Pub/Sub topic or a Cloud Logging log + bucket. A logs filter controls which log + entries are exported. The sink must be + created within a project, organization, + billing account, or folder. """ # Create or coerce a protobuf request object. @@ -1438,8 +1662,9 @@ async def sample_create_sink(): "billingAccounts/[BILLING_ACCOUNT_ID]" "folders/[FOLDER_ID]" - Examples: ``"projects/my-logging-project"``, - ``"organizations/123456789"``. + For examples: + + ``"projects/my-project"`` ``"organizations/123456789"`` This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -1462,12 +1687,12 @@ async def sample_create_sink(): Describes a sink used to export log entries to one of the following destinations in any project: a Cloud - Storage bucket, a BigQuery dataset, or a - Cloud Pub/Sub topic. A logs filter - controls which log entries are exported. - The sink must be created within a - project, organization, billing account, - or folder. + Storage bucket, a BigQuery dataset, a + Pub/Sub topic or a Cloud Logging log + bucket. 
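For the sink docstring updates that follow, the calling pattern is unchanged. A hedged sketch of reading a sink and updating its filter with an explicit field mask, which the revised UpdateSink documentation below recommends over the legacy empty-updateMask behavior; the sink path is a placeholder:

.. code-block:: python

    from google.cloud import logging_v2
    from google.protobuf import field_mask_pb2

    async def update_sink_filter() -> None:
        client = logging_v2.ConfigServiceV2AsyncClient()
        sink = await client.get_sink(sink_name="projects/my-project/sinks/my-sink")
        sink.filter = "severity>=ERROR"
        # An explicit mask avoids the empty-updateMask fallback, which the
        # updated docstring says will eventually become an error.
        await client.update_sink(
            sink_name=sink.name,
            sink=sink,
            update_mask=field_mask_pb2.FieldMask(paths=["filter"]),
        )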
A logs filter controls which log + entries are exported. The sink must be + created within a project, organization, + billing account, or folder. """ # Create or coerce a protobuf request object. @@ -1576,7 +1801,9 @@ async def sample_update_sink(): "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" "folders/[FOLDER_ID]/sinks/[SINK_ID]" - Example: ``"projects/my-project-id/sinks/my-sink-id"``. + For example: + + ``"projects/my-project/sinks/my-sink"`` This corresponds to the ``sink_name`` field on the ``request`` instance; if ``request`` is provided, this @@ -1594,16 +1821,18 @@ async def sample_update_sink(): overwritten if, and only if, it is in the update mask. ``name`` and output only fields cannot be updated. - An empty updateMask is temporarily treated as using the - following mask for backwards compatibility purposes: - destination,filter,includeChildren At some point in the - future, behavior will be removed and specifying an empty - updateMask will be an error. + An empty ``updateMask`` is temporarily treated as using + the following mask for backwards compatibility purposes: + + ``destination,filter,includeChildren`` + + At some point in the future, behavior will be removed + and specifying an empty ``updateMask`` will be an error. For a detailed ``FieldMask`` definition, see https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#google.protobuf.FieldMask - Example: ``updateMask=filter``. + For example: ``updateMask=filter`` This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this @@ -1619,12 +1848,12 @@ async def sample_update_sink(): Describes a sink used to export log entries to one of the following destinations in any project: a Cloud - Storage bucket, a BigQuery dataset, or a - Cloud Pub/Sub topic. A logs filter - controls which log entries are exported. - The sink must be created within a - project, organization, billing account, - or folder. + Storage bucket, a BigQuery dataset, a + Pub/Sub topic or a Cloud Logging log + bucket. A logs filter controls which log + entries are exported. The sink must be + created within a project, organization, + billing account, or folder. """ # Create or coerce a protobuf request object. @@ -1729,7 +1958,9 @@ async def sample_delete_sink(): "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" "folders/[FOLDER_ID]/sinks/[SINK_ID]" - Example: ``"projects/my-project-id/sinks/my-sink-id"``. + For example: + + ``"projects/my-project/sinks/my-sink"`` This corresponds to the ``sink_name`` field on the ``request`` instance; if ``request`` is provided, this @@ -1787,15 +2018,20 @@ async def sample_delete_sink(): metadata=metadata, ) - async def list_exclusions(self, - request: Optional[Union[logging_config.ListExclusionsRequest, dict]] = None, + async def create_link(self, + request: Optional[Union[logging_config.CreateLinkRequest, dict]] = None, *, parent: Optional[str] = None, + link: Optional[logging_config.Link] = None, + link_id: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListExclusionsAsyncPager: - r"""Lists all the exclusions in a parent resource. + ) -> operation_async.AsyncOperation: + r"""Asynchronously creates a linked dataset in BigQuery + which makes it possible to use BigQuery to read the logs + stored in the log bucket. A log bucket may currently + only contain one link. .. 
code-block:: python @@ -1808,39 +2044,56 @@ async def list_exclusions(self, # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 - async def sample_list_exclusions(): + async def sample_create_link(): # Create a client client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) - request = logging_v2.ListExclusionsRequest( + request = logging_v2.CreateLinkRequest( parent="parent_value", + link_id="link_id_value", ) # Make the request - page_result = client.list_exclusions(request=request) + operation = client.create_link(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() # Handle the response - async for response in page_result: - print(response) + print(response) Args: - request (Optional[Union[google.cloud.logging_v2.types.ListExclusionsRequest, dict]]): - The request object. The parameters to ``ListExclusions``. + request (Optional[Union[google.cloud.logging_v2.types.CreateLinkRequest, dict]]): + The request object. The parameters to CreateLink. parent (:class:`str`): - Required. The parent resource whose exclusions are to be - listed. + Required. The full resource name of the bucket to create + a link for. :: - "projects/[PROJECT_ID]" - "organizations/[ORGANIZATION_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]" - "folders/[FOLDER_ID]" + "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. + link (:class:`google.cloud.logging_v2.types.Link`): + Required. The new link. + This corresponds to the ``link`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + link_id (:class:`str`): + Required. The ID to use for the link. The link_id can + have up to 100 characters. A valid link_id must only + have alphanumeric characters and underscores within it. + + This corresponds to the ``link_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1848,41 +2101,38 @@ async def sample_list_exclusions(): sent along with the request as metadata. Returns: - google.cloud.logging_v2.services.config_service_v2.pagers.ListExclusionsAsyncPager: - Result returned from ListExclusions. + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. - Iterating over this object will yield results and - resolve additional pages automatically. + The result type for the operation will be + :class:`google.cloud.logging_v2.types.Link` Describes a + link connected to an analytics enabled bucket. """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent]) + has_flattened_params = any([parent, link, link_id]) if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") - request = logging_config.ListExclusionsRequest(request) + request = logging_config.CreateLinkRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. if parent is not None: request.parent = parent + if link is not None: + request.link = link + if link_id is not None: + request.link_id = link_id # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_exclusions, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, + self._client._transport.create_link, + default_timeout=None, client_info=DEFAULT_CLIENT_INFO, ) @@ -1902,27 +2152,27 @@ async def sample_list_exclusions(): metadata=metadata, ) - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListExclusionsAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + logging_config.Link, + metadata_type=logging_config.LinkMetadata, ) # Done; return the response. return response - async def get_exclusion(self, - request: Optional[Union[logging_config.GetExclusionRequest, dict]] = None, + async def delete_link(self, + request: Optional[Union[logging_config.DeleteLinkRequest, dict]] = None, *, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> logging_config.LogExclusion: - r"""Gets the description of an exclusion. + ) -> operation_async.AsyncOperation: + r"""Deletes a link. This will also delete the + corresponding BigQuery linked dataset. .. code-block:: python @@ -1935,36 +2185,35 @@ async def get_exclusion(self, # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 - async def sample_get_exclusion(): + async def sample_delete_link(): # Create a client client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) - request = logging_v2.GetExclusionRequest( + request = logging_v2.DeleteLinkRequest( name="name_value", ) # Make the request - response = await client.get_exclusion(request=request) + operation = client.delete_link(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() # Handle the response print(response) Args: - request (Optional[Union[google.cloud.logging_v2.types.GetExclusionRequest, dict]]): - The request object. The parameters to ``GetExclusion``. + request (Optional[Union[google.cloud.logging_v2.types.DeleteLinkRequest, dict]]): + The request object. The parameters to DeleteLink. name (:class:`str`): - Required. The resource name of an existing exclusion: - - :: + Required. The full resource name of the link to delete. 
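A hedged sketch of the new create_link and delete_link flow; the bucket path and link ID are placeholders, and Link.description is assumed from the Link message rather than shown in this diff:

.. code-block:: python

    from google.cloud import logging_v2

    async def link_lifecycle() -> None:
        client = logging_v2.ConfigServiceV2AsyncClient()
        operation = await client.create_link(
            parent="projects/my-project/locations/global/buckets/my-bucket",
            link=logging_v2.Link(description="Linked BigQuery dataset"),
            link_id="my_link",  # alphanumerics and underscores, up to 100 chars
        )
        link = await operation.result()  # resolves to a Link (metadata: LinkMetadata)

        # Deleting the link also deletes the BigQuery linked dataset; this
        # operation resolves to Empty.
        op = await client.delete_link(name=link.name)
        await op.result()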
- "projects/[PROJECT_ID]/exclusions/[EXCLUSION_ID]" - "organizations/[ORGANIZATION_ID]/exclusions/[EXCLUSION_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]/exclusions/[EXCLUSION_ID]" - "folders/[FOLDER_ID]/exclusions/[EXCLUSION_ID]" - - Example: - ``"projects/my-project-id/exclusions/my-exclusion-id"``. + "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]" + "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]" + "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]" This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -1976,18 +2225,19 @@ async def sample_get_exclusion(): sent along with the request as metadata. Returns: - google.cloud.logging_v2.types.LogExclusion: - Specifies a set of log entries that - are not to be stored in Logging. If your - GCP resource receives a large volume of - logs, you can use exclusions to reduce - your chargeable logs. Exclusions are - processed after log sinks, so you can - export log entries before they are - excluded. Note that organization-level - and folder-level exclusions don't apply - to child resources, and that you can't - exclude audit log entries. + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } """ # Create or coerce a protobuf request object. @@ -1998,7 +2248,7 @@ async def sample_get_exclusion(): raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") - request = logging_config.GetExclusionRequest(request) + request = logging_config.DeleteLinkRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -2008,16 +2258,8 @@ async def sample_get_exclusion(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_exclusion, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, + self._client._transport.delete_link, + default_timeout=None, client_info=DEFAULT_CLIENT_INFO, ) @@ -2037,22 +2279,26 @@ async def sample_get_exclusion(): metadata=metadata, ) + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=logging_config.LinkMetadata, + ) + # Done; return the response. 
return response - async def create_exclusion(self, - request: Optional[Union[logging_config.CreateExclusionRequest, dict]] = None, + async def list_links(self, + request: Optional[Union[logging_config.ListLinksRequest, dict]] = None, *, parent: Optional[str] = None, - exclusion: Optional[logging_config.LogExclusion] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> logging_config.LogExclusion: - r"""Creates a new exclusion in a specified parent - resource. Only log entries belonging to that resource - can be excluded. You can have up to 10 exclusions in a - resource. + ) -> pagers.ListLinksAsyncPager: + r"""Lists links. .. code-block:: python @@ -2065,54 +2311,37 @@ async def create_exclusion(self, # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 - async def sample_create_exclusion(): + async def sample_list_links(): # Create a client client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) - exclusion = logging_v2.LogExclusion() - exclusion.name = "name_value" - exclusion.filter = "filter_value" - - request = logging_v2.CreateExclusionRequest( + request = logging_v2.ListLinksRequest( parent="parent_value", - exclusion=exclusion, ) # Make the request - response = await client.create_exclusion(request=request) + page_result = client.list_links(request=request) # Handle the response - print(response) + async for response in page_result: + print(response) Args: - request (Optional[Union[google.cloud.logging_v2.types.CreateExclusionRequest, dict]]): - The request object. The parameters to ``CreateExclusion``. + request (Optional[Union[google.cloud.logging_v2.types.ListLinksRequest, dict]]): + The request object. The parameters to ListLinks. parent (:class:`str`): - Required. The parent resource in which to create the - exclusion: - - :: - - "projects/[PROJECT_ID]" - "organizations/[ORGANIZATION_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]" - "folders/[FOLDER_ID]" + Required. The parent resource whose links are to be + listed: - Examples: ``"projects/my-logging-project"``, - ``"organizations/123456789"``. + "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/" + "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/" + "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/" + "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/ This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - exclusion (:class:`google.cloud.logging_v2.types.LogExclusion`): - Required. The new exclusion, whose ``name`` parameter is - an exclusion name that is not already used in the parent - resource. - - This corresponds to the ``exclusion`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2120,42 +2349,34 @@ async def sample_create_exclusion(): sent along with the request as metadata. Returns: - google.cloud.logging_v2.types.LogExclusion: - Specifies a set of log entries that - are not to be stored in Logging. If your - GCP resource receives a large volume of - logs, you can use exclusions to reduce - your chargeable logs. 
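A hedged sketch of paging through the new ListLinks RPC introduced above; outside the generated sample's illustrative context, the coroutine must be awaited to obtain the pager before iteration. The bucket path is a placeholder:

.. code-block:: python

    from google.cloud import logging_v2

    async def print_links() -> None:
        client = logging_v2.ConfigServiceV2AsyncClient()
        # Awaiting the call returns a ListLinksAsyncPager; iterating it
        # resolves additional pages transparently.
        pager = await client.list_links(
            parent="projects/my-project/locations/global/buckets/my-bucket",
        )
        async for link in pager:
            print(link.name)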
Exclusions are - processed after log sinks, so you can - export log entries before they are - excluded. Note that organization-level - and folder-level exclusions don't apply - to child resources, and that you can't - exclude audit log entries. + google.cloud.logging_v2.services.config_service_v2.pagers.ListLinksAsyncPager: + The response from ListLinks. + + Iterating over this object will yield + results and resolve additional pages + automatically. """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, exclusion]) + has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") - request = logging_config.CreateExclusionRequest(request) + request = logging_config.ListLinksRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. if parent is not None: request.parent = parent - if exclusion is not None: - request.exclusion = exclusion # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_exclusion, - default_timeout=120.0, + self._client._transport.list_links, + default_timeout=None, client_info=DEFAULT_CLIENT_INFO, ) @@ -2175,21 +2396,27 @@ async def sample_create_exclusion(): metadata=metadata, ) + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListLinksAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + # Done; return the response. return response - async def update_exclusion(self, - request: Optional[Union[logging_config.UpdateExclusionRequest, dict]] = None, + async def get_link(self, + request: Optional[Union[logging_config.GetLinkRequest, dict]] = None, *, name: Optional[str] = None, - exclusion: Optional[logging_config.LogExclusion] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> logging_config.LogExclusion: - r"""Changes one or more properties of an existing - exclusion. + ) -> logging_config.Link: + r"""Gets a link. .. code-block:: python @@ -2202,67 +2429,35 @@ async def update_exclusion(self, # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 - async def sample_update_exclusion(): + async def sample_get_link(): # Create a client client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) - exclusion = logging_v2.LogExclusion() - exclusion.name = "name_value" - exclusion.filter = "filter_value" - - request = logging_v2.UpdateExclusionRequest( + request = logging_v2.GetLinkRequest( name="name_value", - exclusion=exclusion, ) # Make the request - response = await client.update_exclusion(request=request) + response = await client.get_link(request=request) # Handle the response print(response) Args: - request (Optional[Union[google.cloud.logging_v2.types.UpdateExclusionRequest, dict]]): - The request object. The parameters to ``UpdateExclusion``. + request (Optional[Union[google.cloud.logging_v2.types.GetLinkRequest, dict]]): + The request object. 
The parameters to GetLink. name (:class:`str`): - Required. The resource name of the exclusion to update: + Required. The resource name of the link: - :: - - "projects/[PROJECT_ID]/exclusions/[EXCLUSION_ID]" - "organizations/[ORGANIZATION_ID]/exclusions/[EXCLUSION_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]/exclusions/[EXCLUSION_ID]" - "folders/[FOLDER_ID]/exclusions/[EXCLUSION_ID]" - - Example: - ``"projects/my-project-id/exclusions/my-exclusion-id"``. + "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]" + "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]" + "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID] This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - exclusion (:class:`google.cloud.logging_v2.types.LogExclusion`): - Required. New values for the existing exclusion. Only - the fields specified in ``update_mask`` are relevant. - - This corresponds to the ``exclusion`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Required. A non-empty list of fields to change in the - existing exclusion. New values for the fields are taken - from the corresponding fields in the - [LogExclusion][google.logging.v2.LogExclusion] included - in this request. Fields not mentioned in ``update_mask`` - are not changed and are ignored in the request. - - For example, to change the filter and description of an - exclusion, specify an ``update_mask`` of - ``"filter,description"``. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2270,44 +2465,31 @@ async def sample_update_exclusion(): sent along with the request as metadata. Returns: - google.cloud.logging_v2.types.LogExclusion: - Specifies a set of log entries that - are not to be stored in Logging. If your - GCP resource receives a large volume of - logs, you can use exclusions to reduce - your chargeable logs. Exclusions are - processed after log sinks, so you can - export log entries before they are - excluded. Note that organization-level - and folder-level exclusions don't apply - to child resources, and that you can't - exclude audit log entries. + google.cloud.logging_v2.types.Link: + Describes a link connected to an + analytics enabled bucket. """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, exclusion, update_mask]) + has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") - request = logging_config.UpdateExclusionRequest(request) + request = logging_config.GetLinkRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. 
if name is not None: request.name = name - if exclusion is not None: - request.exclusion = exclusion - if update_mask is not None: - request.update_mask = update_mask # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_exclusion, - default_timeout=120.0, + self._client._transport.get_link, + default_timeout=None, client_info=DEFAULT_CLIENT_INFO, ) @@ -2330,15 +2512,16 @@ async def sample_update_exclusion(): # Done; return the response. return response - async def delete_exclusion(self, - request: Optional[Union[logging_config.DeleteExclusionRequest, dict]] = None, + async def list_exclusions(self, + request: Optional[Union[logging_config.ListExclusionsRequest, dict]] = None, *, - name: Optional[str] = None, + parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes an exclusion. + ) -> pagers.ListExclusionsAsyncPager: + r"""Lists all the exclusions on the \_Default sink in a parent + resource. .. code-block:: python @@ -2351,36 +2534,37 @@ async def delete_exclusion(self, # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 - async def sample_delete_exclusion(): + async def sample_list_exclusions(): # Create a client client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) - request = logging_v2.DeleteExclusionRequest( - name="name_value", + request = logging_v2.ListExclusionsRequest( + parent="parent_value", ) # Make the request - await client.delete_exclusion(request=request) + page_result = client.list_exclusions(request=request) + + # Handle the response + async for response in page_result: + print(response) Args: - request (Optional[Union[google.cloud.logging_v2.types.DeleteExclusionRequest, dict]]): - The request object. The parameters to ``DeleteExclusion``. - name (:class:`str`): - Required. The resource name of an existing exclusion to - delete: + request (Optional[Union[google.cloud.logging_v2.types.ListExclusionsRequest, dict]]): + The request object. The parameters to ``ListExclusions``. + parent (:class:`str`): + Required. The parent resource whose exclusions are to be + listed. :: - "projects/[PROJECT_ID]/exclusions/[EXCLUSION_ID]" - "organizations/[ORGANIZATION_ID]/exclusions/[EXCLUSION_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]/exclusions/[EXCLUSION_ID]" - "folders/[FOLDER_ID]/exclusions/[EXCLUSION_ID]" - - Example: - ``"projects/my-project-id/exclusions/my-exclusion-id"``. + "projects/[PROJECT_ID]" + "organizations/[ORGANIZATION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]" + "folders/[FOLDER_ID]" - This corresponds to the ``name`` field + This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -2388,26 +2572,34 @@ async def sample_delete_exclusion(): timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.services.config_service_v2.pagers.ListExclusionsAsyncPager: + Result returned from ListExclusions. + + Iterating over this object will yield results and + resolve additional pages automatically. + """ # Create or coerce a protobuf request object. 
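A hedged sketch of fetching a single link with get_link; the full link path is a placeholder following the documented pattern, and lifecycle_state is assumed from the Link message:

.. code-block:: python

    from google.cloud import logging_v2

    async def inspect_link() -> None:
        client = logging_v2.ConfigServiceV2AsyncClient()
        link = await client.get_link(
            name="projects/my-project/locations/global/buckets/my-bucket/links/my_link",
        )
        print(link.lifecycle_state)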
# Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") - request = logging_config.DeleteExclusionRequest(request) + request = logging_config.ListExclusionsRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: - request.name = name + if parent is not None: + request.parent = parent # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_exclusion, + self._client._transport.list_exclusions, default_retry=retries.Retry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( core_exceptions.DeadlineExceeded, @@ -2424,34 +2616,39 @@ async def sample_delete_exclusion(): # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), + ("parent", request.parent), )), ) # Send the request. - await rpc( + response = await rpc( request, retry=retry, timeout=timeout, metadata=metadata, ) - async def get_cmek_settings(self, - request: Optional[Union[logging_config.GetCmekSettingsRequest, dict]] = None, + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListExclusionsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_exclusion(self, + request: Optional[Union[logging_config.GetExclusionRequest, dict]] = None, *, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> logging_config.CmekSettings: - r"""Gets the Logs Router CMEK settings for the given resource. - - Note: CMEK for the Logs Router can currently only be configured - for GCP organizations. Once configured, it applies to all - projects and folders in the GCP organization. - - See `Enabling CMEK for Logs - Router `__ - for more information. + ) -> logging_config.LogExclusion: + r"""Gets the description of an exclusion in the \_Default sink. .. code-block:: python @@ -2464,29 +2661,41 @@ async def get_cmek_settings(self, # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 - async def sample_get_cmek_settings(): + async def sample_get_exclusion(): # Create a client client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) - request = logging_v2.GetCmekSettingsRequest( + request = logging_v2.GetExclusionRequest( name="name_value", ) # Make the request - response = await client.get_cmek_settings(request=request) + response = await client.get_exclusion(request=request) # Handle the response print(response) Args: - request (Optional[Union[google.cloud.logging_v2.types.GetCmekSettingsRequest, dict]]): - The request object. The parameters to - [GetCmekSettings][google.logging.v2.ConfigServiceV2.GetCmekSettings]. + request (Optional[Union[google.cloud.logging_v2.types.GetExclusionRequest, dict]]): + The request object. The parameters to ``GetExclusion``. + name (:class:`str`): + Required. 
The resource name of an existing exclusion: - See `Enabling CMEK for Logs - Router `__ - for more information. + :: + + "projects/[PROJECT_ID]/exclusions/[EXCLUSION_ID]" + "organizations/[ORGANIZATION_ID]/exclusions/[EXCLUSION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/exclusions/[EXCLUSION_ID]" + "folders/[FOLDER_ID]/exclusions/[EXCLUSION_ID]" + + For example: + + ``"projects/my-project/exclusions/my-exclusion"`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2494,34 +2703,786 @@ async def sample_get_cmek_settings(): sent along with the request as metadata. Returns: - google.cloud.logging_v2.types.CmekSettings: - Describes the customer-managed encryption key (CMEK) settings associated with - a project, folder, organization, billing account, or - flexible resource. - - Note: CMEK for the Logs Router can currently only be - configured for GCP organizations. Once configured, it - applies to all projects and folders in the GCP - organization. - - See [Enabling CMEK for Logs - Router](\ https://cloud.google.com/logging/docs/routing/managed-encryption) - for more information. + google.cloud.logging_v2.types.LogExclusion: + Specifies a set of log entries that are filtered out by a sink. If + your Google Cloud resource receives a large volume of + log entries, you can use exclusions to reduce your + chargeable logs. Note that exclusions on + organization-level and folder-level sinks don't apply + to child resources. Note also that you cannot modify + the \_Required sink or exclude logs from it. """ # Create or coerce a protobuf request object. - request = logging_config.GetCmekSettingsRequest(request) + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = logging_config.GetExclusionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_cmek_settings, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. + self._client._transport.get_exclusion, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def create_exclusion(self, + request: Optional[Union[logging_config.CreateExclusionRequest, dict]] = None, + *, + parent: Optional[str] = None, + exclusion: Optional[logging_config.LogExclusion] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_config.LogExclusion: + r"""Creates a new exclusion in the \_Default sink in a specified + parent resource. Only log entries belonging to that resource can + be excluded. You can have up to 10 exclusions in a resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import logging_v2 + + async def sample_create_exclusion(): + # Create a client + client = logging_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + exclusion = logging_v2.LogExclusion() + exclusion.name = "name_value" + exclusion.filter = "filter_value" + + request = logging_v2.CreateExclusionRequest( + parent="parent_value", + exclusion=exclusion, + ) + + # Make the request + response = await client.create_exclusion(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.logging_v2.types.CreateExclusionRequest, dict]]): + The request object. The parameters to ``CreateExclusion``. + parent (:class:`str`): + Required. The parent resource in which to create the + exclusion: + + :: + + "projects/[PROJECT_ID]" + "organizations/[ORGANIZATION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]" + "folders/[FOLDER_ID]" + + For examples: + + ``"projects/my-logging-project"`` + ``"organizations/123456789"`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + exclusion (:class:`google.cloud.logging_v2.types.LogExclusion`): + Required. The new exclusion, whose ``name`` parameter is + an exclusion name that is not already used in the parent + resource. + + This corresponds to the ``exclusion`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.types.LogExclusion: + Specifies a set of log entries that are filtered out by a sink. If + your Google Cloud resource receives a large volume of + log entries, you can use exclusions to reduce your + chargeable logs. Note that exclusions on + organization-level and folder-level sinks don't apply + to child resources. Note also that you cannot modify + the \_Required sink or exclude logs from it. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent, exclusion]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = logging_config.CreateExclusionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if exclusion is not None: + request.exclusion = exclusion + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_exclusion, + default_timeout=120.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_exclusion(self, + request: Optional[Union[logging_config.UpdateExclusionRequest, dict]] = None, + *, + name: Optional[str] = None, + exclusion: Optional[logging_config.LogExclusion] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_config.LogExclusion: + r"""Changes one or more properties of an existing exclusion in the + \_Default sink. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import logging_v2 + + async def sample_update_exclusion(): + # Create a client + client = logging_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + exclusion = logging_v2.LogExclusion() + exclusion.name = "name_value" + exclusion.filter = "filter_value" + + request = logging_v2.UpdateExclusionRequest( + name="name_value", + exclusion=exclusion, + ) + + # Make the request + response = await client.update_exclusion(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.logging_v2.types.UpdateExclusionRequest, dict]]): + The request object. The parameters to ``UpdateExclusion``. + name (:class:`str`): + Required. The resource name of the exclusion to update: + + :: + + "projects/[PROJECT_ID]/exclusions/[EXCLUSION_ID]" + "organizations/[ORGANIZATION_ID]/exclusions/[EXCLUSION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/exclusions/[EXCLUSION_ID]" + "folders/[FOLDER_ID]/exclusions/[EXCLUSION_ID]" + + For example: + + ``"projects/my-project/exclusions/my-exclusion"`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + exclusion (:class:`google.cloud.logging_v2.types.LogExclusion`): + Required. New values for the existing exclusion. Only + the fields specified in ``update_mask`` are relevant. 
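A hedged sketch of the exclusion flow on the \_Default sink, combining create_exclusion with the update_exclusion call documented below; the project and exclusion names are placeholders:

.. code-block:: python

    from google.cloud import logging_v2
    from google.protobuf import field_mask_pb2

    async def exclusion_sketch() -> None:
        client = logging_v2.ConfigServiceV2AsyncClient()
        created = await client.create_exclusion(
            parent="projects/my-project",
            exclusion=logging_v2.LogExclusion(
                name="low-severity",
                filter="severity<ERROR",
            ),
        )
        created.description = "Drop sub-ERROR entries from the _Default sink"
        # update_exclusion takes the full resource name plus a non-empty mask.
        await client.update_exclusion(
            name="projects/my-project/exclusions/low-severity",
            exclusion=created,
            update_mask=field_mask_pb2.FieldMask(paths=["description"]),
        )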
+ + This corresponds to the ``exclusion`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. A non-empty list of fields to change in the + existing exclusion. New values for the fields are taken + from the corresponding fields in the + [LogExclusion][google.logging.v2.LogExclusion] included + in this request. Fields not mentioned in ``update_mask`` + are not changed and are ignored in the request. + + For example, to change the filter and description of an + exclusion, specify an ``update_mask`` of + ``"filter,description"``. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.types.LogExclusion: + Specifies a set of log entries that are filtered out by a sink. If + your Google Cloud resource receives a large volume of + log entries, you can use exclusions to reduce your + chargeable logs. Note that exclusions on + organization-level and folder-level sinks don't apply + to child resources. Note also that you cannot modify + the \_Required sink or exclude logs from it. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, exclusion, update_mask]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = logging_config.UpdateExclusionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if exclusion is not None: + request.exclusion = exclusion + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_exclusion, + default_timeout=120.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_exclusion(self, + request: Optional[Union[logging_config.DeleteExclusionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes an exclusion in the \_Default sink. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import logging_v2 + + async def sample_delete_exclusion(): + # Create a client + client = logging_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.DeleteExclusionRequest( + name="name_value", + ) + + # Make the request + await client.delete_exclusion(request=request) + + Args: + request (Optional[Union[google.cloud.logging_v2.types.DeleteExclusionRequest, dict]]): + The request object. The parameters to ``DeleteExclusion``. + name (:class:`str`): + Required. The resource name of an existing exclusion to + delete: + + :: + + "projects/[PROJECT_ID]/exclusions/[EXCLUSION_ID]" + "organizations/[ORGANIZATION_ID]/exclusions/[EXCLUSION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/exclusions/[EXCLUSION_ID]" + "folders/[FOLDER_ID]/exclusions/[EXCLUSION_ID]" + + For example: + + ``"projects/my-project/exclusions/my-exclusion"`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = logging_config.DeleteExclusionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_exclusion, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def get_cmek_settings(self, + request: Optional[Union[logging_config.GetCmekSettingsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_config.CmekSettings: + r"""Gets the Logging CMEK settings for the given resource. + + Note: CMEK for the Log Router can be configured for Google Cloud + projects, folders, organizations and billing accounts. Once + configured for an organization, it applies to all projects and + folders in the Google Cloud organization. + + See `Enabling CMEK for Log + Router <https://cloud.google.com/logging/docs/routing/managed-encryption>`__ + for more information. + + .. code-block:: python
+ + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import logging_v2 + + async def sample_get_cmek_settings(): + # Create a client + client = logging_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.GetCmekSettingsRequest( + name="name_value", + ) + + # Make the request + response = await client.get_cmek_settings(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.logging_v2.types.GetCmekSettingsRequest, dict]]): + The request object. The parameters to + [GetCmekSettings][google.logging.v2.ConfigServiceV2.GetCmekSettings]. + + See `Enabling CMEK for Log + Router <https://cloud.google.com/logging/docs/routing/managed-encryption>`__ + for more information. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.types.CmekSettings: + Describes the customer-managed encryption key (CMEK) settings associated with + a project, folder, organization, billing account, or + flexible resource. + + Note: CMEK for the Log Router can currently only be + configured for Google Cloud organizations. Once + configured, it applies to all projects and folders in + the Google Cloud organization. + + See [Enabling CMEK for Log + Router](\ https://cloud.google.com/logging/docs/routing/managed-encryption) + for more information. + + """ + # Create or coerce a protobuf request object. + request = logging_config.GetCmekSettingsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_cmek_settings, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_cmek_settings(self, + request: Optional[Union[logging_config.UpdateCmekSettingsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_config.CmekSettings: + r"""Updates the Log Router CMEK settings for the given resource. + + Note: CMEK for the Log Router can currently only be configured + for Google Cloud organizations. Once configured, it applies to + all projects and folders in the Google Cloud organization. + + [UpdateCmekSettings][google.logging.v2.ConfigServiceV2.UpdateCmekSettings] + will fail if 1) ``kms_key_name`` is invalid, or 2) the + associated service account does not have the required + ``roles/cloudkms.cryptoKeyEncrypterDecrypter`` role assigned for + the key, or 3) access to the key is disabled.
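As a caller-side sketch of the guardrails above (not part of the generated file; the organization ID, project, and KMS key name are placeholders, and the field-mask usage assumes the standard protobuf helper):

.. code-block:: python

    # Hypothetical usage; all resource names below are placeholders.
    from google.cloud import logging_v2
    from google.protobuf import field_mask_pb2

    async def enable_cmek():
        client = logging_v2.ConfigServiceV2AsyncClient()
        request = logging_v2.UpdateCmekSettingsRequest(
            name="organizations/123456789/cmekSettings",
            cmek_settings=logging_v2.CmekSettings(
                kms_key_name=(
                    "projects/my-project/locations/us-central1"
                    "/keyRings/my-ring/cryptoKeys/my-key"
                ),
            ),
            update_mask=field_mask_pb2.FieldMask(paths=["kms_key_name"]),
        )
        # Per the docstring, this call fails if the key name is invalid,
        # the service account lacks the cryptoKeyEncrypterDecrypter role,
        # or access to the key is disabled.
        return await client.update_cmek_settings(request=request)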
+ + See `Enabling CMEK for Log + Router <https://cloud.google.com/logging/docs/routing/managed-encryption>`__ + for more information. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import logging_v2 + + async def sample_update_cmek_settings(): + # Create a client + client = logging_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.UpdateCmekSettingsRequest( + name="name_value", + ) + + # Make the request + response = await client.update_cmek_settings(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.logging_v2.types.UpdateCmekSettingsRequest, dict]]): + The request object. The parameters to + [UpdateCmekSettings][google.logging.v2.ConfigServiceV2.UpdateCmekSettings]. + + See `Enabling CMEK for Log + Router <https://cloud.google.com/logging/docs/routing/managed-encryption>`__ + for more information. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.types.CmekSettings: + Describes the customer-managed encryption key (CMEK) settings associated with + a project, folder, organization, billing account, or + flexible resource. + + Note: CMEK for the Log Router can currently only be + configured for Google Cloud organizations. Once + configured, it applies to all projects and folders in + the Google Cloud organization. + + See [Enabling CMEK for Log + Router](\ https://cloud.google.com/logging/docs/routing/managed-encryption) + for more information. + + """ + # Create or coerce a protobuf request object. + request = logging_config.UpdateCmekSettingsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_cmek_settings, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_settings(self, + request: Optional[Union[logging_config.GetSettingsRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_config.Settings: + r"""Gets the Log Router settings for the given resource. + + Note: Settings for the Log Router can be retrieved for Google Cloud + projects, folders, organizations and billing accounts. Currently + it can only be configured for organizations. Once configured for + an organization, it applies to all projects and folders in the + Google Cloud organization. + + See `Enabling CMEK for Log + Router <https://cloud.google.com/logging/docs/routing/managed-encryption>`__ + for more information. + + .. code-block:: python
+ + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import logging_v2 + + async def sample_get_settings(): + # Create a client + client = logging_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.GetSettingsRequest( + name="name_value", + ) + + # Make the request + response = await client.get_settings(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.logging_v2.types.GetSettingsRequest, dict]]): + The request object. The parameters to + [GetSettings][google.logging.v2.ConfigServiceV2.GetSettings]. + + See `Enabling CMEK for Log + Router <https://cloud.google.com/logging/docs/routing/managed-encryption>`__ + for more information. + name (:class:`str`): + Required. The resource for which to retrieve settings. + + :: + + "projects/[PROJECT_ID]/settings" + "organizations/[ORGANIZATION_ID]/settings" + "billingAccounts/[BILLING_ACCOUNT_ID]/settings" + "folders/[FOLDER_ID]/settings" + + For example: + + ``"organizations/12345/settings"`` + + Note: Settings for the Log Router can be retrieved for Google + Cloud projects, folders, organizations and billing + accounts. Currently it can only be configured for + organizations. Once configured for an organization, it + applies to all projects and folders in the Google Cloud + organization. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.types.Settings: + Describes the settings associated + with a project, folder, organization, + billing account, or flexible resource. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = logging_config.GetSettingsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_settings, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("name", request.name), @@ -2539,26 +3500,31 @@ async def sample_get_cmek_settings(): # Done; return the response.
return response - async def update_cmek_settings(self, - request: Optional[Union[logging_config.UpdateCmekSettingsRequest, dict]] = None, + async def update_settings(self, + request: Optional[Union[logging_config.UpdateSettingsRequest, dict]] = None, *, + settings: Optional[logging_config.Settings] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> logging_config.CmekSettings: - r"""Updates the Logs Router CMEK settings for the given resource. + ) -> logging_config.Settings: + r"""Updates the Log Router settings for the given resource. - Note: CMEK for the Logs Router can currently only be configured - for GCP organizations. Once configured, it applies to all - projects and folders in the GCP organization. + Note: Settings for the Log Router can currently only be + configured for Google Cloud organizations. Once configured, it + applies to all projects and folders in the Google Cloud + organization. - [UpdateCmekSettings][google.logging.v2.ConfigServiceV2.UpdateCmekSettings] + [UpdateSettings][google.logging.v2.ConfigServiceV2.UpdateSettings] will fail if 1) ``kms_key_name`` is invalid, or 2) the associated service account does not have the required ``roles/cloudkms.cryptoKeyEncrypterDecrypter`` role assigned for - the key, or 3) access to the key is disabled. + the key, or 3) access to the key is disabled. 4) ``location_id`` + is not supported by Logging. 5) ``location_id`` violates + OrgPolicy. - See `Enabling CMEK for Logs + See `Enabling CMEK for Log Router <https://cloud.google.com/logging/docs/routing/managed-encryption>`__ for more information. @@ -2573,29 +3539,53 @@ async def update_cmek_settings(self, # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 - async def sample_update_cmek_settings(): + async def sample_update_settings(): # Create a client client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) - request = logging_v2.UpdateCmekSettingsRequest( + request = logging_v2.UpdateSettingsRequest( name="name_value", ) # Make the request - response = await client.update_cmek_settings(request=request) + response = await client.update_settings(request=request) # Handle the response print(response) Args: - request (Optional[Union[google.cloud.logging_v2.types.UpdateCmekSettingsRequest, dict]]): + request (Optional[Union[google.cloud.logging_v2.types.UpdateSettingsRequest, dict]]): The request object. The parameters to - [UpdateCmekSettings][google.logging.v2.ConfigServiceV2.UpdateCmekSettings]. + [UpdateSettings][google.logging.v2.ConfigServiceV2.UpdateSettings]. - See `Enabling CMEK for Logs + See `Enabling CMEK for Log Router <https://cloud.google.com/logging/docs/routing/managed-encryption>`__ for more information. + settings (:class:`google.cloud.logging_v2.types.Settings`): + Required. The settings to update. + + See `Enabling CMEK for Log + Router <https://cloud.google.com/logging/docs/routing/managed-encryption>`__ + for more information. + + This corresponds to the ``settings`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Optional. Field mask identifying which fields from + ``settings`` should be updated. A field will be + overwritten if and only if it is in the update mask. + Output only fields cannot be updated. + + See [FieldMask][google.protobuf.FieldMask] for more + information.
+ + For example: ``"updateMask=kmsKeyName"`` + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2603,28 +3593,33 @@ async def sample_update_cmek_settings(): sent along with the request as metadata. Returns: - google.cloud.logging_v2.types.CmekSettings: - Describes the customer-managed encryption key (CMEK) settings associated with - a project, folder, organization, billing account, or - flexible resource. - - Note: CMEK for the Logs Router can currently only be - configured for GCP organizations. Once configured, it - applies to all projects and folders in the GCP - organization. - - See [Enabling CMEK for Logs - Router](\ https://cloud.google.com/logging/docs/routing/managed-encryption) - for more information. + google.cloud.logging_v2.types.Settings: + Describes the settings associated + with a project, folder, organization, + billing account, or flexible resource. """ # Create or coerce a protobuf request object. - request = logging_config.UpdateCmekSettingsRequest(request) + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([settings, update_mask]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = logging_config.UpdateSettingsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if settings is not None: + request.settings = settings + if update_mask is not None: + request.update_mask = update_mask # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_cmek_settings, + self._client._transport.update_settings, default_timeout=None, client_info=DEFAULT_CLIENT_INFO, ) @@ -2648,6 +3643,248 @@ async def sample_update_cmek_settings(): # Done; return the response. return response + async def copy_log_entries(self, + request: Optional[Union[logging_config.CopyLogEntriesRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Copies a set of log entries from a log bucket to a + Cloud Storage bucket. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import logging_v2 + + async def sample_copy_log_entries(): + # Create a client + client = logging_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.CopyLogEntriesRequest( + name="name_value", + destination="destination_value", + ) + + # Make the request + operation = client.copy_log_entries(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.logging_v2.types.CopyLogEntriesRequest, dict]]): + The request object. The parameters to CopyLogEntries. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.logging_v2.types.CopyLogEntriesResponse` + Response type for CopyLogEntries long running + operations. + + """ + # Create or coerce a protobuf request object. + request = logging_config.CopyLogEntriesRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.copy_log_entries, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + logging_config.CopyLogEntriesResponse, + metadata_type=logging_config.CopyLogEntriesMetadata, + ) + + # Done; return the response. + return response + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method.wrap_method( + self._client._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + async def __aenter__(self) -> "ConfigServiceV2AsyncClient": return self diff --git a/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py b/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py index 4a515374cd..62f41d97a5 100755 --- a/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py +++ b/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py @@ -35,14 +35,17 @@ except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object] # type: ignore +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore from google.cloud.logging_v2.services.config_service_v2 import pagers from google.cloud.logging_v2.types import logging_config +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore from .transports.base import ConfigServiceV2Transport, DEFAULT_CLIENT_INFO from .transports.grpc import ConfigServiceV2GrpcTransport from .transports.grpc_asyncio import ConfigServiceV2GrpcAsyncIOTransport -from .transports.rest import ConfigServiceV2RestTransport class ConfigServiceV2ClientMeta(type): @@ -55,7 +58,6 @@ class ConfigServiceV2ClientMeta(type): _transport_registry = OrderedDict() # type: Dict[str, Type[ConfigServiceV2Transport]] _transport_registry["grpc"] = ConfigServiceV2GrpcTransport _transport_registry["grpc_asyncio"] = ConfigServiceV2GrpcAsyncIOTransport - _transport_registry["rest"] = ConfigServiceV2RestTransport def get_transport_class(cls, label: Optional[str] = None, @@ -175,6 +177,17 @@ def parse_cmek_settings_path(path: str) -> Dict[str,str]: m = re.match(r"^projects/(?P.+?)/cmekSettings$", path) return m.groupdict() if m else {} + @staticmethod + def link_path(project: str,location: str,bucket: str,link: str,) -> str: + """Returns a fully-qualified link string.""" + return "projects/{project}/locations/{location}/buckets/{bucket}/links/{link}".format(project=project, location=location, bucket=bucket, link=link, ) + + @staticmethod + def parse_link_path(path: str) -> Dict[str,str]: + """Parses a link path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/buckets/(?P.+?)/links/(?P.+?)$", path) + return m.groupdict() if m else {} + @staticmethod def log_bucket_path(project: str,location: str,bucket: str,) -> str: """Returns a fully-qualified log_bucket string.""" @@ -219,6 +232,17 @@ def parse_log_view_path(path: str) -> Dict[str,str]: m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/buckets/(?P.+?)/views/(?P.+?)$", path) return m.groupdict() if m else {} + @staticmethod + def settings_path(project: str,) -> str: + """Returns a fully-qualified settings string.""" + return "projects/{project}/settings".format(project=project, ) + + @staticmethod + def parse_settings_path(path: str) -> Dict[str,str]: + """Parses a 
settings path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/settings$", path) + return m.groupdict() if m else {} + @staticmethod def common_billing_account_path(billing_account: str, ) -> str: """Returns a fully-qualified billing_account string.""" @@ -350,9 +374,6 @@ def __init__(self, *, transport (Union[str, ConfigServiceV2Transport]): The transport to use. If set to None, a transport is chosen automatically. - NOTE: "rest" transport functionality is currently in a - beta state (preview). We welcome your feedback via an - issue in this library's source repository. client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the client. It won't take effect if a ``transport`` instance is provided. (1) The ``api_endpoint`` property can be used to override the @@ -432,7 +453,7 @@ def list_buckets(self, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListBucketsPager: - r"""Lists buckets. + r"""Lists log buckets. .. code-block:: python @@ -491,6 +512,7 @@ def sample_list_buckets(): Returns: google.cloud.logging_v2.services.config_service_v2.pagers.ListBucketsPager: The response from ListBuckets. + Iterating over this object will yield results and resolve additional pages automatically. @@ -554,7 +576,7 @@ def get_bucket(self, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogBucket: - r"""Gets a bucket. + r"""Gets a log bucket. .. code-block:: python @@ -593,7 +615,9 @@ def sample_get_bucket(): Returns: google.cloud.logging_v2.types.LogBucket: - Describes a repository of logs. + Describes a repository in which log + entries are stored. + """ # Create or coerce a protobuf request object. # Minor optimization to avoid making a copy if the user passes @@ -626,6 +650,208 @@ def sample_get_bucket(): # Done; return the response. return response + def create_bucket_async(self, + request: Optional[Union[logging_config.CreateBucketRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates a log bucket asynchronously that can be used + to store log entries. + After a bucket has been created, the bucket's location + cannot be changed. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import logging_v2 + + def sample_create_bucket_async(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.CreateBucketRequest( + parent="parent_value", + bucket_id="bucket_id_value", + ) + + # Make the request + operation = client.create_bucket_async(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.logging_v2.types.CreateBucketRequest, dict]): + The request object. The parameters to ``CreateBucket``. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.logging_v2.types.LogBucket` + Describes a repository in which log entries are stored. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a logging_config.CreateBucketRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, logging_config.CreateBucketRequest): + request = logging_config.CreateBucketRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_bucket_async] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + logging_config.LogBucket, + metadata_type=logging_config.BucketMetadata, + ) + + # Done; return the response. + return response + + def update_bucket_async(self, + request: Optional[Union[logging_config.UpdateBucketRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Updates a log bucket asynchronously. + + If the bucket has a ``lifecycle_state`` of ``DELETE_REQUESTED``, + then ``FAILED_PRECONDITION`` will be returned. + + After a bucket has been created, the bucket's location cannot be + changed. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import logging_v2 + + def sample_update_bucket_async(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.UpdateBucketRequest( + name="name_value", + ) + + # Make the request + operation = client.update_bucket_async(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.logging_v2.types.UpdateBucketRequest, dict]): + The request object. The parameters to ``UpdateBucket``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
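Since both ``create_bucket_async`` and ``update_bucket_async`` return a ``google.api_core.operation.Operation``, a caller typically polls or blocks on the future. A sketch (the bucket name is a placeholder, and reading ``metadata.state`` assumes ``BucketMetadata`` exposes a state field, as the wrapping below suggests):

.. code-block:: python

    # Hypothetical usage; the bucket resource name is a placeholder.
    from google.cloud import logging_v2

    client = logging_v2.ConfigServiceV2Client()
    operation = client.update_bucket_async(
        request=logging_v2.UpdateBucketRequest(
            name="projects/my-project/locations/global/buckets/my-bucket",
        ),
    )
    if not operation.done():
        # BucketMetadata (assumed to carry a state enum) reports progress.
        print("state:", operation.metadata.state)
    bucket = operation.result(timeout=300)  # raises on failure or timeout
    print(bucket.name)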
+ + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.logging_v2.types.LogBucket` + Describes a repository in which log entries are stored. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a logging_config.UpdateBucketRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, logging_config.UpdateBucketRequest): + request = logging_config.UpdateBucketRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_bucket_async] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + logging_config.LogBucket, + metadata_type=logging_config.BucketMetadata, + ) + + # Done; return the response. + return response + def create_bucket(self, request: Optional[Union[logging_config.CreateBucketRequest, dict]] = None, *, @@ -633,9 +859,9 @@ def create_bucket(self, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogBucket: - r"""Creates a bucket that can be used to store log - entries. Once a bucket has been created, the region - cannot be changed. + r"""Creates a log bucket that can be used to store log + entries. After a bucket has been created, the bucket's + location cannot be changed. .. code-block:: python @@ -675,7 +901,9 @@ def sample_create_bucket(): Returns: google.cloud.logging_v2.types.LogBucket: - Describes a repository of logs. + Describes a repository in which log + entries are stored. + """ # Create or coerce a protobuf request object. # Minor optimization to avoid making a copy if the user passes @@ -715,17 +943,13 @@ def update_bucket(self, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogBucket: - r"""Updates a bucket. This method replaces the following fields in - the existing bucket with values from the new bucket: - ``retention_period`` + r"""Updates a log bucket. - If the retention period is decreased and the bucket is locked, - FAILED_PRECONDITION will be returned. + If the bucket has a ``lifecycle_state`` of ``DELETE_REQUESTED``, + then ``FAILED_PRECONDITION`` will be returned. - If the bucket has a LifecycleState of DELETE_REQUESTED, - FAILED_PRECONDITION will be returned. - - A buckets region may not be modified after it is created. + After a bucket has been created, the bucket's location cannot be + changed. .. code-block:: python @@ -764,7 +988,9 @@ def sample_update_bucket(): Returns: google.cloud.logging_v2.types.LogBucket: - Describes a repository of logs. + Describes a repository in which log + entries are stored. + """ # Create or coerce a protobuf request object. 
# Minor optimization to avoid making a copy if the user passes @@ -804,9 +1030,12 @@ def delete_bucket(self, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: - r"""Deletes a bucket. Moves the bucket to the DELETE_REQUESTED - state. After 7 days, the bucket will be purged and all logs in - the bucket will be permanently deleted. + r"""Deletes a log bucket. + + Changes the bucket's ``lifecycle_state`` to the + ``DELETE_REQUESTED`` state. After 7 days, the bucket will be + purged and all log entries in the bucket will be permanently + deleted. .. code-block:: python @@ -875,8 +1104,9 @@ def undelete_bucket(self, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: - r"""Undeletes a bucket. A bucket that has been deleted - may be undeleted within the grace period of 7 days. + r"""Undeletes a log bucket. A bucket that has been + deleted can be undeleted within the grace period of 7 + days. .. code-block:: python @@ -946,7 +1176,7 @@ def list_views(self, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListViewsPager: - r"""Lists views on a bucket. + r"""Lists views on a log bucket. .. code-block:: python @@ -997,6 +1227,7 @@ def sample_list_views(): Returns: google.cloud.logging_v2.services.config_service_v2.pagers.ListViewsPager: The response from ListViews. + Iterating over this object will yield results and resolve additional pages automatically. @@ -1060,7 +1291,7 @@ def get_view(self, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogView: - r"""Gets a view. + r"""Gets a view on a log bucket.. .. code-block:: python @@ -1099,8 +1330,8 @@ def sample_get_view(): Returns: google.cloud.logging_v2.types.LogView: - Describes a view over logs in a - bucket. + Describes a view over log entries in + a bucket. """ # Create or coerce a protobuf request object. @@ -1141,8 +1372,8 @@ def create_view(self, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogView: - r"""Creates a view over logs in a bucket. A bucket may - contain a maximum of 50 views. + r"""Creates a view over log entries in a log bucket. A + bucket may contain a maximum of 30 views. .. code-block:: python @@ -1182,8 +1413,8 @@ def sample_create_view(): Returns: google.cloud.logging_v2.types.LogView: - Describes a view over logs in a - bucket. + Describes a view over log entries in + a bucket. """ # Create or coerce a protobuf request object. @@ -1224,8 +1455,11 @@ def update_view(self, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogView: - r"""Updates a view. This method replaces the following fields in the - existing view with values from the new view: ``filter``. + r"""Updates a view on a log bucket. This method replaces the + following fields in the existing view with values from the new + view: ``filter``. If an ``UNAVAILABLE`` error is returned, this + indicates that system is not in a state where it can update the + view. If this occurs, please try again in a few minutes. .. code-block:: python @@ -1264,8 +1498,8 @@ def sample_update_view(): Returns: google.cloud.logging_v2.types.LogView: - Describes a view over logs in a - bucket. + Describes a view over log entries in + a bucket. """ # Create or coerce a protobuf request object. 
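Given the ``UNAVAILABLE`` guidance above, a caller can opt into a client-side retry instead of retrying manually. A sketch (the view name is a placeholder; the ``Retry`` arguments mirror the defaults used elsewhere in this file):

.. code-block:: python

    # Hypothetical usage; the view resource name is a placeholder.
    from google.api_core import exceptions as core_exceptions
    from google.api_core import retry as retries
    from google.cloud import logging_v2

    client = logging_v2.ConfigServiceV2Client()
    view = client.update_view(
        request=logging_v2.UpdateViewRequest(
            name="projects/my-project/locations/global/buckets/my-bucket/views/my-view",
        ),
        # Retry only transient UNAVAILABLE errors, with exponential backoff.
        retry=retries.Retry(
            initial=1.0,
            maximum=60.0,
            multiplier=1.3,
            predicate=retries.if_exception_type(core_exceptions.ServiceUnavailable),
            deadline=120.0,
        ),
    )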
@@ -1306,7 +1540,10 @@ def delete_view(self, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: - r"""Deletes a view from a bucket. + r"""Deletes a view on a log bucket. If an ``UNAVAILABLE`` error is + returned, this indicates that system is not in a state where it + can delete the view. If this occurs, please try again in a few + minutes. .. code-block:: python @@ -1536,7 +1773,9 @@ def sample_get_sink(): "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" "folders/[FOLDER_ID]/sinks/[SINK_ID]" - Example: ``"projects/my-project-id/sinks/my-sink-id"``. + For example: + + ``"projects/my-project/sinks/my-sink"`` This corresponds to the ``sink_name`` field on the ``request`` instance; if ``request`` is provided, this @@ -1552,12 +1791,12 @@ def sample_get_sink(): Describes a sink used to export log entries to one of the following destinations in any project: a Cloud - Storage bucket, a BigQuery dataset, or a - Cloud Pub/Sub topic. A logs filter - controls which log entries are exported. - The sink must be created within a - project, organization, billing account, - or folder. + Storage bucket, a BigQuery dataset, a + Pub/Sub topic or a Cloud Logging log + bucket. A logs filter controls which log + entries are exported. The sink must be + created within a project, organization, + billing account, or folder. """ # Create or coerce a protobuf request object. @@ -1661,8 +1900,9 @@ def sample_create_sink(): "billingAccounts/[BILLING_ACCOUNT_ID]" "folders/[FOLDER_ID]" - Examples: ``"projects/my-logging-project"``, - ``"organizations/123456789"``. + For examples: + + ``"projects/my-project"`` ``"organizations/123456789"`` This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -1685,12 +1925,12 @@ def sample_create_sink(): Describes a sink used to export log entries to one of the following destinations in any project: a Cloud - Storage bucket, a BigQuery dataset, or a - Cloud Pub/Sub topic. A logs filter - controls which log entries are exported. - The sink must be created within a - project, organization, billing account, - or folder. + Storage bucket, a BigQuery dataset, a + Pub/Sub topic or a Cloud Logging log + bucket. A logs filter controls which log + entries are exported. The sink must be + created within a project, organization, + billing account, or folder. """ # Create or coerce a protobuf request object. @@ -1799,7 +2039,9 @@ def sample_update_sink(): "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" "folders/[FOLDER_ID]/sinks/[SINK_ID]" - Example: ``"projects/my-project-id/sinks/my-sink-id"``. + For example: + + ``"projects/my-project/sinks/my-sink"`` This corresponds to the ``sink_name`` field on the ``request`` instance; if ``request`` is provided, this @@ -1817,16 +2059,18 @@ def sample_update_sink(): overwritten if, and only if, it is in the update mask. ``name`` and output only fields cannot be updated. - An empty updateMask is temporarily treated as using the - following mask for backwards compatibility purposes: - destination,filter,includeChildren At some point in the - future, behavior will be removed and specifying an empty - updateMask will be an error. + An empty ``updateMask`` is temporarily treated as using + the following mask for backwards compatibility purposes: + + ``destination,filter,includeChildren`` + + At some point in the future, behavior will be removed + and specifying an empty ``updateMask`` will be an error. 
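A sketch of the explicit-mask form, which avoids the legacy empty-mask behavior described above (project and sink IDs are placeholders):

.. code-block:: python

    # Hypothetical usage; project and sink IDs are placeholders.
    from google.cloud import logging_v2
    from google.protobuf import field_mask_pb2

    client = logging_v2.ConfigServiceV2Client()
    updated = client.update_sink(
        sink_name="projects/my-project/sinks/my-sink",
        sink=logging_v2.LogSink(filter="severity>=ERROR"),
        # Only the filter is replaced; all other sink fields are untouched.
        update_mask=field_mask_pb2.FieldMask(paths=["filter"]),
    )
    print(updated.writer_identity)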
For a detailed ``FieldMask`` definition, see https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#google.protobuf.FieldMask - Example: ``updateMask=filter``. + For example: ``updateMask=filter`` This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this @@ -1842,12 +2086,12 @@ def sample_update_sink(): Describes a sink used to export log entries to one of the following destinations in any project: a Cloud - Storage bucket, a BigQuery dataset, or a - Cloud Pub/Sub topic. A logs filter - controls which log entries are exported. - The sink must be created within a - project, organization, billing account, - or folder. + Storage bucket, a BigQuery dataset, a + Pub/Sub topic or a Cloud Logging log + bucket. A logs filter controls which log + entries are exported. The sink must be + created within a project, organization, + billing account, or folder. """ # Create or coerce a protobuf request object. @@ -1944,7 +2188,9 @@ def sample_delete_sink(): "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" "folders/[FOLDER_ID]/sinks/[SINK_ID]" - Example: ``"projects/my-project-id/sinks/my-sink-id"``. + For example: + + ``"projects/my-project/sinks/my-sink"`` This corresponds to the ``sink_name`` field on the ``request`` instance; if ``request`` is provided, this @@ -1994,15 +2240,20 @@ def sample_delete_sink(): metadata=metadata, ) - def list_exclusions(self, - request: Optional[Union[logging_config.ListExclusionsRequest, dict]] = None, + def create_link(self, + request: Optional[Union[logging_config.CreateLinkRequest, dict]] = None, *, parent: Optional[str] = None, + link: Optional[logging_config.Link] = None, + link_id: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListExclusionsPager: - r"""Lists all the exclusions in a parent resource. + ) -> operation.Operation: + r"""Asynchronously creates a linked dataset in BigQuery + which makes it possible to use BigQuery to read the logs + stored in the log bucket. A log bucket may currently + only contain one link. .. code-block:: python @@ -2015,39 +2266,56 @@ def list_exclusions(self, # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 - def sample_list_exclusions(): + def sample_create_link(): # Create a client client = logging_v2.ConfigServiceV2Client() # Initialize request argument(s) - request = logging_v2.ListExclusionsRequest( + request = logging_v2.CreateLinkRequest( parent="parent_value", + link_id="link_id_value", ) # Make the request - page_result = client.list_exclusions(request=request) + operation = client.create_link(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() # Handle the response - for response in page_result: - print(response) + print(response) Args: - request (Union[google.cloud.logging_v2.types.ListExclusionsRequest, dict]): - The request object. The parameters to ``ListExclusions``. + request (Union[google.cloud.logging_v2.types.CreateLinkRequest, dict]): + The request object. The parameters to CreateLink. parent (str): - Required. The parent resource whose exclusions are to be - listed. + Required. The full resource name of the bucket to create + a link for. 
:: - "projects/[PROJECT_ID]" - "organizations/[ORGANIZATION_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]" - "folders/[FOLDER_ID]" + "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. + link (google.cloud.logging_v2.types.Link): + Required. The new link. + This corresponds to the ``link`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + link_id (str): + Required. The ID to use for the link. The link_id can + have up to 100 characters. A valid link_id must only + have alphanumeric characters and underscores within it. + + This corresponds to the ``link_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2055,35 +2323,40 @@ def sample_list_exclusions(): sent along with the request as metadata. Returns: - google.cloud.logging_v2.services.config_service_v2.pagers.ListExclusionsPager: - Result returned from ListExclusions. + google.api_core.operation.Operation: + An object representing a long-running operation. - Iterating over this object will yield results and - resolve additional pages automatically. + The result type for the operation will be + :class:`google.cloud.logging_v2.types.Link` Describes a + link connected to an analytics enabled bucket. """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + has_flattened_params = any([parent, link, link_id]) if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') # Minor optimization to avoid making a copy if the user passes - # in a logging_config.ListExclusionsRequest. + # in a logging_config.CreateLinkRequest. # There's no risk of modifying the input as we've already verified # there are no flattened fields. - if not isinstance(request, logging_config.ListExclusionsRequest): - request = logging_config.ListExclusionsRequest(request) + if not isinstance(request, logging_config.CreateLinkRequest): + request = logging_config.CreateLinkRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. if parent is not None: request.parent = parent + if link is not None: + request.link = link + if link_id is not None: + request.link_id = link_id # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_exclusions] + rpc = self._transport._wrapped_methods[self._transport.create_link] # Certain fields should be provided within the metadata header; # add these here. @@ -2101,27 +2374,27 @@ def sample_list_exclusions(): metadata=metadata, ) - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. 
- response = pagers.ListExclusionsPager( - method=rpc, - request=request, - response=response, - metadata=metadata, + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + logging_config.Link, + metadata_type=logging_config.LinkMetadata, ) # Done; return the response. return response - def get_exclusion(self, - request: Optional[Union[logging_config.GetExclusionRequest, dict]] = None, + def delete_link(self, + request: Optional[Union[logging_config.DeleteLinkRequest, dict]] = None, *, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> logging_config.LogExclusion: - r"""Gets the description of an exclusion. + ) -> operation.Operation: + r"""Deletes a link. This will also delete the + corresponding BigQuery linked dataset. .. code-block:: python @@ -2134,36 +2407,35 @@ def get_exclusion(self, # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 - def sample_get_exclusion(): + def sample_delete_link(): # Create a client client = logging_v2.ConfigServiceV2Client() # Initialize request argument(s) - request = logging_v2.GetExclusionRequest( + request = logging_v2.DeleteLinkRequest( name="name_value", ) # Make the request - response = client.get_exclusion(request=request) + operation = client.delete_link(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() # Handle the response print(response) Args: - request (Union[google.cloud.logging_v2.types.GetExclusionRequest, dict]): - The request object. The parameters to ``GetExclusion``. + request (Union[google.cloud.logging_v2.types.DeleteLinkRequest, dict]): + The request object. The parameters to DeleteLink. name (str): - Required. The resource name of an existing exclusion: - - :: - - "projects/[PROJECT_ID]/exclusions/[EXCLUSION_ID]" - "organizations/[ORGANIZATION_ID]/exclusions/[EXCLUSION_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]/exclusions/[EXCLUSION_ID]" - "folders/[FOLDER_ID]/exclusions/[EXCLUSION_ID]" + Required. The full resource name of the link to delete. - Example: - ``"projects/my-project-id/exclusions/my-exclusion-id"``. + "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]" + "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]" + "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]" This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -2175,18 +2447,19 @@ def sample_get_exclusion(): sent along with the request as metadata. Returns: - google.cloud.logging_v2.types.LogExclusion: - Specifies a set of log entries that - are not to be stored in Logging. If your - GCP resource receives a large volume of - logs, you can use exclusions to reduce - your chargeable logs. Exclusions are - processed after log sinks, so you can - export log entries before they are - excluded. Note that organization-level - and folder-level exclusions don't apply - to child resources, and that you can't - exclude audit log entries. + google.api_core.operation.Operation: + An object representing a long-running operation. 
+ + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } """ # Create or coerce a protobuf request object. @@ -2198,11 +2471,11 @@ def sample_get_exclusion(): 'the individual field arguments should be set.') # Minor optimization to avoid making a copy if the user passes - # in a logging_config.GetExclusionRequest. + # in a logging_config.DeleteLinkRequest. # There's no risk of modifying the input as we've already verified # there are no flattened fields. - if not isinstance(request, logging_config.GetExclusionRequest): - request = logging_config.GetExclusionRequest(request) + if not isinstance(request, logging_config.DeleteLinkRequest): + request = logging_config.DeleteLinkRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. if name is not None: @@ -2210,7 +2483,7 @@ def sample_get_exclusion(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_exclusion] + rpc = self._transport._wrapped_methods[self._transport.delete_link] # Certain fields should be provided within the metadata header; # add these here. @@ -2228,22 +2501,26 @@ def sample_get_exclusion(): metadata=metadata, ) + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=logging_config.LinkMetadata, + ) + # Done; return the response. return response - def create_exclusion(self, - request: Optional[Union[logging_config.CreateExclusionRequest, dict]] = None, + def list_links(self, + request: Optional[Union[logging_config.ListLinksRequest, dict]] = None, *, parent: Optional[str] = None, - exclusion: Optional[logging_config.LogExclusion] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> logging_config.LogExclusion: - r"""Creates a new exclusion in a specified parent - resource. Only log entries belonging to that resource - can be excluded. You can have up to 10 exclusions in a - resource. + ) -> pagers.ListLinksPager: + r"""Lists links. .. code-block:: python @@ -2256,54 +2533,37 @@ def create_exclusion(self, # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 - def sample_create_exclusion(): + def sample_list_links(): # Create a client client = logging_v2.ConfigServiceV2Client() # Initialize request argument(s) - exclusion = logging_v2.LogExclusion() - exclusion.name = "name_value" - exclusion.filter = "filter_value" - - request = logging_v2.CreateExclusionRequest( + request = logging_v2.ListLinksRequest( parent="parent_value", - exclusion=exclusion, ) # Make the request - response = client.create_exclusion(request=request) + page_result = client.list_links(request=request) # Handle the response - print(response) + for response in page_result: + print(response) Args: - request (Union[google.cloud.logging_v2.types.CreateExclusionRequest, dict]): - The request object. The parameters to ``CreateExclusion``. 
+ request (Union[google.cloud.logging_v2.types.ListLinksRequest, dict]): + The request object. The parameters to ListLinks. parent (str): - Required. The parent resource in which to create the - exclusion: - - :: - - "projects/[PROJECT_ID]" - "organizations/[ORGANIZATION_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]" - "folders/[FOLDER_ID]" + Required. The parent resource whose links are to be + listed: - Examples: ``"projects/my-logging-project"``, - ``"organizations/123456789"``. + "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/" + "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/" + "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/" + "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/ This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - exclusion (google.cloud.logging_v2.types.LogExclusion): - Required. The new exclusion, whose ``name`` parameter is - an exclusion name that is not already used in the parent - resource. - - This corresponds to the ``exclusion`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2311,44 +2571,36 @@ def sample_create_exclusion(): sent along with the request as metadata. Returns: - google.cloud.logging_v2.types.LogExclusion: - Specifies a set of log entries that - are not to be stored in Logging. If your - GCP resource receives a large volume of - logs, you can use exclusions to reduce - your chargeable logs. Exclusions are - processed after log sinks, so you can - export log entries before they are - excluded. Note that organization-level - and folder-level exclusions don't apply - to child resources, and that you can't - exclude audit log entries. + google.cloud.logging_v2.services.config_service_v2.pagers.ListLinksPager: + The response from ListLinks. + + Iterating over this object will yield + results and resolve additional pages + automatically. """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, exclusion]) + has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') # Minor optimization to avoid making a copy if the user passes - # in a logging_config.CreateExclusionRequest. + # in a logging_config.ListLinksRequest. # There's no risk of modifying the input as we've already verified # there are no flattened fields. - if not isinstance(request, logging_config.CreateExclusionRequest): - request = logging_config.CreateExclusionRequest(request) + if not isinstance(request, logging_config.ListLinksRequest): + request = logging_config.ListLinksRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. if parent is not None: request.parent = parent - if exclusion is not None: - request.exclusion = exclusion # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
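Since ``list_links`` returns a ``ListLinksPager``, iterating over the result transparently issues follow-up ``ListLinks`` requests. A short sketch under the same hypothetical resource names as above:

.. code-block:: python

    from google.cloud import logging_v2

    client = logging_v2.ConfigServiceV2Client()

    # The pager fetches additional pages lazily during iteration.
    for link in client.list_links(
            parent="projects/my-project/locations/global/buckets/my-bucket"):
        print(link.name)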
- rpc = self._transport._wrapped_methods[self._transport.create_exclusion] + rpc = self._transport._wrapped_methods[self._transport.list_links] # Certain fields should be provided within the metadata header; # add these here. @@ -2366,21 +2618,27 @@ def sample_create_exclusion(): metadata=metadata, ) + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListLinksPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + # Done; return the response. return response - def update_exclusion(self, - request: Optional[Union[logging_config.UpdateExclusionRequest, dict]] = None, + def get_link(self, + request: Optional[Union[logging_config.GetLinkRequest, dict]] = None, *, name: Optional[str] = None, - exclusion: Optional[logging_config.LogExclusion] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> logging_config.LogExclusion: - r"""Changes one or more properties of an existing - exclusion. + ) -> logging_config.Link: + r"""Gets a link. .. code-block:: python @@ -2393,67 +2651,35 @@ def update_exclusion(self, # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 - def sample_update_exclusion(): + def sample_get_link(): # Create a client client = logging_v2.ConfigServiceV2Client() # Initialize request argument(s) - exclusion = logging_v2.LogExclusion() - exclusion.name = "name_value" - exclusion.filter = "filter_value" - - request = logging_v2.UpdateExclusionRequest( + request = logging_v2.GetLinkRequest( name="name_value", - exclusion=exclusion, ) # Make the request - response = client.update_exclusion(request=request) + response = client.get_link(request=request) # Handle the response print(response) Args: - request (Union[google.cloud.logging_v2.types.UpdateExclusionRequest, dict]): - The request object. The parameters to ``UpdateExclusion``. + request (Union[google.cloud.logging_v2.types.GetLinkRequest, dict]): + The request object. The parameters to GetLink. name (str): - Required. The resource name of the exclusion to update: - - :: - - "projects/[PROJECT_ID]/exclusions/[EXCLUSION_ID]" - "organizations/[ORGANIZATION_ID]/exclusions/[EXCLUSION_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]/exclusions/[EXCLUSION_ID]" - "folders/[FOLDER_ID]/exclusions/[EXCLUSION_ID]" + Required. The resource name of the link: - Example: - ``"projects/my-project-id/exclusions/my-exclusion-id"``. + "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]" + "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]" + "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID] This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - exclusion (google.cloud.logging_v2.types.LogExclusion): - Required. New values for the existing exclusion. Only - the fields specified in ``update_mask`` are relevant. - - This corresponds to the ``exclusion`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. 
A non-empty list of fields to change in the - existing exclusion. New values for the fields are taken - from the corresponding fields in the - [LogExclusion][google.logging.v2.LogExclusion] included - in this request. Fields not mentioned in ``update_mask`` - are not changed and are ignored in the request. - - For example, to change the filter and description of an - exclusion, specify an ``update_mask`` of - ``"filter,description"``. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2461,46 +2687,33 @@ def sample_update_exclusion(): sent along with the request as metadata. Returns: - google.cloud.logging_v2.types.LogExclusion: - Specifies a set of log entries that - are not to be stored in Logging. If your - GCP resource receives a large volume of - logs, you can use exclusions to reduce - your chargeable logs. Exclusions are - processed after log sinks, so you can - export log entries before they are - excluded. Note that organization-level - and folder-level exclusions don't apply - to child resources, and that you can't - exclude audit log entries. + google.cloud.logging_v2.types.Link: + Describes a link connected to an + analytics enabled bucket. """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, exclusion, update_mask]) + has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') # Minor optimization to avoid making a copy if the user passes - # in a logging_config.UpdateExclusionRequest. + # in a logging_config.GetLinkRequest. # There's no risk of modifying the input as we've already verified # there are no flattened fields. - if not isinstance(request, logging_config.UpdateExclusionRequest): - request = logging_config.UpdateExclusionRequest(request) + if not isinstance(request, logging_config.GetLinkRequest): + request = logging_config.GetLinkRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. if name is not None: request.name = name - if exclusion is not None: - request.exclusion = exclusion - if update_mask is not None: - request.update_mask = update_mask # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_exclusion] + rpc = self._transport._wrapped_methods[self._transport.get_link] # Certain fields should be provided within the metadata header; # add these here. @@ -2521,15 +2734,16 @@ def sample_update_exclusion(): # Done; return the response. return response - def delete_exclusion(self, - request: Optional[Union[logging_config.DeleteExclusionRequest, dict]] = None, + def list_exclusions(self, + request: Optional[Union[logging_config.ListExclusionsRequest, dict]] = None, *, - name: Optional[str] = None, + parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes an exclusion. 
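Unlike the link methods above, ``get_link`` is a plain unary call that returns the ``Link`` directly; only the fully qualified resource name is needed. A sketch with hypothetical IDs:

.. code-block:: python

    from google.cloud import logging_v2

    client = logging_v2.ConfigServiceV2Client()

    link = client.get_link(
        name="projects/my-project/locations/global/buckets/my-bucket/links/my_link",
    )
    # Link carries the BigQuery dataset connected to the bucket.
    print(link.bigquery_dataset)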
+ ) -> pagers.ListExclusionsPager: + r"""Lists all the exclusions on the \_Default sink in a parent + resource. .. code-block:: python @@ -2542,36 +2756,37 @@ def delete_exclusion(self, # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 - def sample_delete_exclusion(): + def sample_list_exclusions(): # Create a client client = logging_v2.ConfigServiceV2Client() # Initialize request argument(s) - request = logging_v2.DeleteExclusionRequest( - name="name_value", + request = logging_v2.ListExclusionsRequest( + parent="parent_value", ) # Make the request - client.delete_exclusion(request=request) + page_result = client.list_exclusions(request=request) + + # Handle the response + for response in page_result: + print(response) Args: - request (Union[google.cloud.logging_v2.types.DeleteExclusionRequest, dict]): - The request object. The parameters to ``DeleteExclusion``. - name (str): - Required. The resource name of an existing exclusion to - delete: + request (Union[google.cloud.logging_v2.types.ListExclusionsRequest, dict]): + The request object. The parameters to ``ListExclusions``. + parent (str): + Required. The parent resource whose exclusions are to be + listed. :: - "projects/[PROJECT_ID]/exclusions/[EXCLUSION_ID]" - "organizations/[ORGANIZATION_ID]/exclusions/[EXCLUSION_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]/exclusions/[EXCLUSION_ID]" - "folders/[FOLDER_ID]/exclusions/[EXCLUSION_ID]" - - Example: - ``"projects/my-project-id/exclusions/my-exclusion-id"``. + "projects/[PROJECT_ID]" + "organizations/[ORGANIZATION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]" + "folders/[FOLDER_ID]" - This corresponds to the ``name`` field + This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -2579,62 +2794,75 @@ def sample_delete_exclusion(): timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.services.config_service_v2.pagers.ListExclusionsPager: + Result returned from ListExclusions. + + Iterating over this object will yield results and + resolve additional pages automatically. + """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') # Minor optimization to avoid making a copy if the user passes - # in a logging_config.DeleteExclusionRequest. + # in a logging_config.ListExclusionsRequest. # There's no risk of modifying the input as we've already verified # there are no flattened fields. - if not isinstance(request, logging_config.DeleteExclusionRequest): - request = logging_config.DeleteExclusionRequest(request) + if not isinstance(request, logging_config.ListExclusionsRequest): + request = logging_config.ListExclusionsRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: - request.name = name + if parent is not None: + request.parent = parent # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
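``list_exclusions`` pages the same way, but its parent is a top-level resource rather than a bucket. A sketch with a hypothetical project:

.. code-block:: python

    from google.cloud import logging_v2

    client = logging_v2.ConfigServiceV2Client()

    # Iteration resolves additional pages automatically.
    for exclusion in client.list_exclusions(parent="projects/my-project"):
        print(exclusion.name, exclusion.filter)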
- rpc = self._transport._wrapped_methods[self._transport.delete_exclusion] + rpc = self._transport._wrapped_methods[self._transport.list_exclusions] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), + ("parent", request.parent), )), ) # Send the request. - rpc( + response = rpc( request, retry=retry, timeout=timeout, metadata=metadata, ) - def get_cmek_settings(self, - request: Optional[Union[logging_config.GetCmekSettingsRequest, dict]] = None, + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListExclusionsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_exclusion(self, + request: Optional[Union[logging_config.GetExclusionRequest, dict]] = None, *, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> logging_config.CmekSettings: - r"""Gets the Logs Router CMEK settings for the given resource. - - Note: CMEK for the Logs Router can currently only be configured - for GCP organizations. Once configured, it applies to all - projects and folders in the GCP organization. - - See `Enabling CMEK for Logs - Router `__ - for more information. + ) -> logging_config.LogExclusion: + r"""Gets the description of an exclusion in the \_Default sink. .. code-block:: python @@ -2647,29 +2875,41 @@ def get_cmek_settings(self, # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 - def sample_get_cmek_settings(): + def sample_get_exclusion(): # Create a client client = logging_v2.ConfigServiceV2Client() # Initialize request argument(s) - request = logging_v2.GetCmekSettingsRequest( + request = logging_v2.GetExclusionRequest( name="name_value", ) # Make the request - response = client.get_cmek_settings(request=request) + response = client.get_exclusion(request=request) # Handle the response print(response) Args: - request (Union[google.cloud.logging_v2.types.GetCmekSettingsRequest, dict]): - The request object. The parameters to - [GetCmekSettings][google.logging.v2.ConfigServiceV2.GetCmekSettings]. + request (Union[google.cloud.logging_v2.types.GetExclusionRequest, dict]): + The request object. The parameters to ``GetExclusion``. + name (str): + Required. The resource name of an existing exclusion: - See `Enabling CMEK for Logs - Router `__ - for more information. + :: + + "projects/[PROJECT_ID]/exclusions/[EXCLUSION_ID]" + "organizations/[ORGANIZATION_ID]/exclusions/[EXCLUSION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/exclusions/[EXCLUSION_ID]" + "folders/[FOLDER_ID]/exclusions/[EXCLUSION_ID]" + + For example: + + ``"projects/my-project/exclusions/my-exclusion"`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2677,32 +2917,38 @@ def sample_get_cmek_settings(): sent along with the request as metadata. 
Returns: - google.cloud.logging_v2.types.CmekSettings: - Describes the customer-managed encryption key (CMEK) settings associated with - a project, folder, organization, billing account, or - flexible resource. - - Note: CMEK for the Logs Router can currently only be - configured for GCP organizations. Once configured, it - applies to all projects and folders in the GCP - organization. - - See [Enabling CMEK for Logs - Router](\ https://cloud.google.com/logging/docs/routing/managed-encryption) - for more information. + google.cloud.logging_v2.types.LogExclusion: + Specifies a set of log entries that are filtered out by a sink. If + your Google Cloud resource receives a large volume of + log entries, you can use exclusions to reduce your + chargeable logs. Note that exclusions on + organization-level and folder-level sinks don't apply + to child resources. Note also that you cannot modify + the \_Required sink or exclude logs from it. """ # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + # Minor optimization to avoid making a copy if the user passes - # in a logging_config.GetCmekSettingsRequest. + # in a logging_config.GetExclusionRequest. # There's no risk of modifying the input as we've already verified # there are no flattened fields. - if not isinstance(request, logging_config.GetCmekSettingsRequest): - request = logging_config.GetCmekSettingsRequest(request) + if not isinstance(request, logging_config.GetExclusionRequest): + request = logging_config.GetExclusionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_cmek_settings] + rpc = self._transport._wrapped_methods[self._transport.get_exclusion] # Certain fields should be provided within the metadata header; # add these here. @@ -2723,26 +2969,762 @@ def sample_get_cmek_settings(): # Done; return the response. return response - def update_cmek_settings(self, - request: Optional[Union[logging_config.UpdateCmekSettingsRequest, dict]] = None, + def create_exclusion(self, + request: Optional[Union[logging_config.CreateExclusionRequest, dict]] = None, + *, + parent: Optional[str] = None, + exclusion: Optional[logging_config.LogExclusion] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_config.LogExclusion: + r"""Creates a new exclusion in the \_Default sink in a specified + parent resource. Only log entries belonging to that resource can + be excluded. You can have up to 10 exclusions in a resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import logging_v2 + + def sample_create_exclusion(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + exclusion = logging_v2.LogExclusion() + exclusion.name = "name_value" + exclusion.filter = "filter_value" + + request = logging_v2.CreateExclusionRequest( + parent="parent_value", + exclusion=exclusion, + ) + + # Make the request + response = client.create_exclusion(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.logging_v2.types.CreateExclusionRequest, dict]): + The request object. The parameters to ``CreateExclusion``. + parent (str): + Required. The parent resource in which to create the + exclusion: + + :: + + "projects/[PROJECT_ID]" + "organizations/[ORGANIZATION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]" + "folders/[FOLDER_ID]" + + For examples: + + ``"projects/my-logging-project"`` + ``"organizations/123456789"`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + exclusion (google.cloud.logging_v2.types.LogExclusion): + Required. The new exclusion, whose ``name`` parameter is + an exclusion name that is not already used in the parent + resource. + + This corresponds to the ``exclusion`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.types.LogExclusion: + Specifies a set of log entries that are filtered out by a sink. If + your Google Cloud resource receives a large volume of + log entries, you can use exclusions to reduce your + chargeable logs. Note that exclusions on + organization-level and folder-level sinks don't apply + to child resources. Note also that you cannot modify + the \_Required sink or exclude logs from it. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, exclusion]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a logging_config.CreateExclusionRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, logging_config.CreateExclusionRequest): + request = logging_config.CreateExclusionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if exclusion is not None: + request.exclusion = exclusion + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_exclusion] + + # Certain fields should be provided within the metadata header; + # add these here. 
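The flattened form of ``create_exclusion`` matches the generated sample: ``name`` and ``filter`` are the required ``LogExclusion`` fields. A sketch with hypothetical values:

.. code-block:: python

    from google.cloud import logging_v2

    client = logging_v2.ConfigServiceV2Client()

    exclusion = logging_v2.LogExclusion(
        name="my-exclusion",
        # Drop low-severity entries from the _Default sink.
        filter="severity < ERROR",
    )
    created = client.create_exclusion(
        parent="projects/my-project",
        exclusion=exclusion,
    )
    print(created.name)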
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_exclusion(self, + request: Optional[Union[logging_config.UpdateExclusionRequest, dict]] = None, + *, + name: Optional[str] = None, + exclusion: Optional[logging_config.LogExclusion] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_config.LogExclusion: + r"""Changes one or more properties of an existing exclusion in the + \_Default sink. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import logging_v2 + + def sample_update_exclusion(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + exclusion = logging_v2.LogExclusion() + exclusion.name = "name_value" + exclusion.filter = "filter_value" + + request = logging_v2.UpdateExclusionRequest( + name="name_value", + exclusion=exclusion, + ) + + # Make the request + response = client.update_exclusion(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.logging_v2.types.UpdateExclusionRequest, dict]): + The request object. The parameters to ``UpdateExclusion``. + name (str): + Required. The resource name of the exclusion to update: + + :: + + "projects/[PROJECT_ID]/exclusions/[EXCLUSION_ID]" + "organizations/[ORGANIZATION_ID]/exclusions/[EXCLUSION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/exclusions/[EXCLUSION_ID]" + "folders/[FOLDER_ID]/exclusions/[EXCLUSION_ID]" + + For example: + + ``"projects/my-project/exclusions/my-exclusion"`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + exclusion (google.cloud.logging_v2.types.LogExclusion): + Required. New values for the existing exclusion. Only + the fields specified in ``update_mask`` are relevant. + + This corresponds to the ``exclusion`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. A non-empty list of fields to change in the + existing exclusion. New values for the fields are taken + from the corresponding fields in the + [LogExclusion][google.logging.v2.LogExclusion] included + in this request. Fields not mentioned in ``update_mask`` + are not changed and are ignored in the request. + + For example, to change the filter and description of an + exclusion, specify an ``update_mask`` of + ``"filter,description"``. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.types.LogExclusion: + Specifies a set of log entries that are filtered out by a sink. If + your Google Cloud resource receives a large volume of + log entries, you can use exclusions to reduce your + chargeable logs. Note that exclusions on + organization-level and folder-level sinks don't apply + to child resources. Note also that you cannot modify + the \_Required sink or exclude logs from it. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, exclusion, update_mask]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a logging_config.UpdateExclusionRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, logging_config.UpdateExclusionRequest): + request = logging_config.UpdateExclusionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if exclusion is not None: + request.exclusion = exclusion + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_exclusion] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_exclusion(self, + request: Optional[Union[logging_config.DeleteExclusionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes an exclusion in the \_Default sink. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import logging_v2 + + def sample_delete_exclusion(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.DeleteExclusionRequest( + name="name_value", + ) + + # Make the request + client.delete_exclusion(request=request) + + Args: + request (Union[google.cloud.logging_v2.types.DeleteExclusionRequest, dict]): + The request object. The parameters to ``DeleteExclusion``. + name (str): + Required. 
The resource name of an existing exclusion to + delete: + + :: + + "projects/[PROJECT_ID]/exclusions/[EXCLUSION_ID]" + "organizations/[ORGANIZATION_ID]/exclusions/[EXCLUSION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/exclusions/[EXCLUSION_ID]" + "folders/[FOLDER_ID]/exclusions/[EXCLUSION_ID]" + + For example: + + ``"projects/my-project/exclusions/my-exclusion"`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a logging_config.DeleteExclusionRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, logging_config.DeleteExclusionRequest): + request = logging_config.DeleteExclusionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_exclusion] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def get_cmek_settings(self, + request: Optional[Union[logging_config.GetCmekSettingsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_config.CmekSettings: + r"""Gets the Logging CMEK settings for the given resource. + + Note: CMEK for the Log Router can be configured for Google Cloud + projects, folders, organizations and billing accounts. Once + configured for an organization, it applies to all projects and + folders in the Google Cloud organization. + + See `Enabling CMEK for Log + Router `__ + for more information. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import logging_v2 + + def sample_get_cmek_settings(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.GetCmekSettingsRequest( + name="name_value", + ) + + # Make the request + response = client.get_cmek_settings(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.logging_v2.types.GetCmekSettingsRequest, dict]): + The request object. The parameters to + [GetCmekSettings][google.logging.v2.ConfigServiceV2.GetCmekSettings]. + + See `Enabling CMEK for Log + Router `__ + for more information. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.types.CmekSettings: + Describes the customer-managed encryption key (CMEK) settings associated with + a project, folder, organization, billing account, or + flexible resource. + + Note: CMEK for the Log Router can currently only be + configured for Google Cloud organizations. Once + configured, it applies to all projects and folders in + the Google Cloud organization. + + See [Enabling CMEK for Log + Router](\ https://cloud.google.com/logging/docs/routing/managed-encryption) + for more information. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a logging_config.GetCmekSettingsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, logging_config.GetCmekSettingsRequest): + request = logging_config.GetCmekSettingsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_cmek_settings] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_cmek_settings(self, + request: Optional[Union[logging_config.UpdateCmekSettingsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_config.CmekSettings: + r"""Updates the Log Router CMEK settings for the given resource. + + Note: CMEK for the Log Router can currently only be configured + for Google Cloud organizations. Once configured, it applies to + all projects and folders in the Google Cloud organization. + + [UpdateCmekSettings][google.logging.v2.ConfigServiceV2.UpdateCmekSettings] + will fail if 1) ``kms_key_name`` is invalid, or 2) the + associated service account does not have the required + ``roles/cloudkms.cryptoKeyEncrypterDecrypter`` role assigned for + the key, or 3) access to the key is disabled. + + See `Enabling CMEK for Log + Router `__ + for more information. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import logging_v2 + + def sample_update_cmek_settings(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.UpdateCmekSettingsRequest( + name="name_value", + ) + + # Make the request + response = client.update_cmek_settings(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.logging_v2.types.UpdateCmekSettingsRequest, dict]): + The request object. The parameters to + [UpdateCmekSettings][google.logging.v2.ConfigServiceV2.UpdateCmekSettings]. + + See `Enabling CMEK for Log + Router `__ + for more information. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.types.CmekSettings: + Describes the customer-managed encryption key (CMEK) settings associated with + a project, folder, organization, billing account, or + flexible resource. + + Note: CMEK for the Log Router can currently only be + configured for Google Cloud organizations. Once + configured, it applies to all projects and folders in + the Google Cloud organization. + + See [Enabling CMEK for Log + Router](\ https://cloud.google.com/logging/docs/routing/managed-encryption) + for more information. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a logging_config.UpdateCmekSettingsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, logging_config.UpdateCmekSettingsRequest): + request = logging_config.UpdateCmekSettingsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_cmek_settings] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_settings(self, + request: Optional[Union[logging_config.GetSettingsRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_config.Settings: + r"""Gets the Log Router settings for the given resource. + + Note: Settings for the Log Router can be get for Google Cloud + projects, folders, organizations and billing accounts. Currently + it can only be configured for organizations. Once configured for + an organization, it applies to all projects and folders in the + Google Cloud organization. 
+ + See `Enabling CMEK for Log + Router `__ + for more information. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import logging_v2 + + def sample_get_settings(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.GetSettingsRequest( + name="name_value", + ) + + # Make the request + response = client.get_settings(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.logging_v2.types.GetSettingsRequest, dict]): + The request object. The parameters to + [GetSettings][google.logging.v2.ConfigServiceV2.GetSettings]. + + See `Enabling CMEK for Log + Router `__ + for more information. + name (str): + Required. The resource for which to retrieve settings. + + :: + + "projects/[PROJECT_ID]/settings" + "organizations/[ORGANIZATION_ID]/settings" + "billingAccounts/[BILLING_ACCOUNT_ID]/settings" + "folders/[FOLDER_ID]/settings" + + For example: + + ``"organizations/12345/settings"`` + + Note: Settings for the Log Router can be get for Google + Cloud projects, folders, organizations and billing + accounts. Currently it can only be configured for + organizations. Once configured for an organization, it + applies to all projects and folders in the Google Cloud + organization. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.types.Settings: + Describes the settings associated + with a project, folder, organization, + billing account, or flexible resource. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a logging_config.GetSettingsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, logging_config.GetSettingsRequest): + request = logging_config.GetSettingsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_settings] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_settings(self, + request: Optional[Union[logging_config.UpdateSettingsRequest, dict]] = None, *, + settings: Optional[logging_config.Settings] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> logging_config.CmekSettings: - r"""Updates the Logs Router CMEK settings for the given resource. + ) -> logging_config.Settings: + r"""Updates the Log Router settings for the given resource. - Note: CMEK for the Logs Router can currently only be configured - for GCP organizations. Once configured, it applies to all - projects and folders in the GCP organization. + Note: Settings for the Log Router can currently only be + configured for Google Cloud organizations. Once configured, it + applies to all projects and folders in the Google Cloud + organization. - [UpdateCmekSettings][google.logging.v2.ConfigServiceV2.UpdateCmekSettings] + [UpdateSettings][google.logging.v2.ConfigServiceV2.UpdateSettings] will fail if 1) ``kms_key_name`` is invalid, or 2) the associated service account does not have the required ``roles/cloudkms.cryptoKeyEncrypterDecrypter`` role assigned for - the key, or 3) access to the key is disabled. + the key, or 3) access to the key is disabled. 4) ``location_id`` + is not supported by Logging. 5) ``location_id`` violate + OrgPolicy. - See `Enabling CMEK for Logs + See `Enabling CMEK for Log Router `__ for more information. @@ -2757,29 +3739,53 @@ def update_cmek_settings(self, # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 - def sample_update_cmek_settings(): + def sample_update_settings(): # Create a client client = logging_v2.ConfigServiceV2Client() # Initialize request argument(s) - request = logging_v2.UpdateCmekSettingsRequest( + request = logging_v2.UpdateSettingsRequest( name="name_value", ) # Make the request - response = client.update_cmek_settings(request=request) + response = client.update_settings(request=request) # Handle the response print(response) Args: - request (Union[google.cloud.logging_v2.types.UpdateCmekSettingsRequest, dict]): + request (Union[google.cloud.logging_v2.types.UpdateSettingsRequest, dict]): The request object. The parameters to - [UpdateCmekSettings][google.logging.v2.ConfigServiceV2.UpdateCmekSettings]. + [UpdateSettings][google.logging.v2.ConfigServiceV2.UpdateSettings]. + + See `Enabling CMEK for Log + Router `__ + for more information. + settings (google.cloud.logging_v2.types.Settings): + Required. The settings to update. - See `Enabling CMEK for Logs + See `Enabling CMEK for Log Router `__ for more information. + + This corresponds to the ``settings`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. Field mask identifying which fields from + ``settings`` should be updated. A field will be + overwritten if and only if it is in the update mask. + Output only fields cannot be updated. + + See [FieldMask][google.protobuf.FieldMask] for more + information. + + For example: ``"updateMask=kmsKeyName"`` + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
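Putting the ``settings`` and ``update_mask`` fields together, an ``update_settings`` call might look like the sketch below; the organization ID and KMS key name are hypothetical, and the request object is built directly so that its ``name`` field can be set alongside the payload:

.. code-block:: python

    from google.cloud import logging_v2
    from google.protobuf import field_mask_pb2

    client = logging_v2.ConfigServiceV2Client()

    request = logging_v2.UpdateSettingsRequest(
        name="organizations/123456789/settings",
        settings=logging_v2.Settings(
            kms_key_name="projects/my-project/locations/global/keyRings/my-ring/cryptoKeys/my-key",
        ),
        # Only fields named in the mask are overwritten.
        update_mask=field_mask_pb2.FieldMask(paths=["kms_key_name"]),
    )
    response = client.update_settings(request=request)
    print(response.kms_key_name)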
retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2787,32 +3793,36 @@ def sample_update_cmek_settings(): sent along with the request as metadata. Returns: - google.cloud.logging_v2.types.CmekSettings: - Describes the customer-managed encryption key (CMEK) settings associated with - a project, folder, organization, billing account, or - flexible resource. - - Note: CMEK for the Logs Router can currently only be - configured for GCP organizations. Once configured, it - applies to all projects and folders in the GCP - organization. - - See [Enabling CMEK for Logs - Router](\ https://cloud.google.com/logging/docs/routing/managed-encryption) - for more information. + google.cloud.logging_v2.types.Settings: + Describes the settings associated + with a project, folder, organization, + billing account, or flexible resource. """ # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([settings, update_mask]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + # Minor optimization to avoid making a copy if the user passes - # in a logging_config.UpdateCmekSettingsRequest. + # in a logging_config.UpdateSettingsRequest. # There's no risk of modifying the input as we've already verified # there are no flattened fields. - if not isinstance(request, logging_config.UpdateCmekSettingsRequest): - request = logging_config.UpdateCmekSettingsRequest(request) + if not isinstance(request, logging_config.UpdateSettingsRequest): + request = logging_config.UpdateSettingsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if settings is not None: + request.settings = settings + if update_mask is not None: + request.update_mask = update_mask # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_cmek_settings] + rpc = self._transport._wrapped_methods[self._transport.update_settings] # Certain fields should be provided within the metadata header; # add these here. @@ -2833,6 +3843,97 @@ def sample_update_cmek_settings(): # Done; return the response. return response + def copy_log_entries(self, + request: Optional[Union[logging_config.CopyLogEntriesRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Copies a set of log entries from a log bucket to a + Cloud Storage bucket. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import logging_v2 + + def sample_copy_log_entries(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.CopyLogEntriesRequest( + name="name_value", + destination="destination_value", + ) + + # Make the request + operation = client.copy_log_entries(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.logging_v2.types.CopyLogEntriesRequest, dict]): + The request object. The parameters to CopyLogEntries. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.logging_v2.types.CopyLogEntriesResponse` + Response type for CopyLogEntries long running + operations. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a logging_config.CopyLogEntriesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, logging_config.CopyLogEntriesRequest): + request = logging_config.CopyLogEntriesRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.copy_log_entries] + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + logging_config.CopyLogEntriesResponse, + metadata_type=logging_config.CopyLogEntriesMetadata, + ) + + # Done; return the response. + return response + def __enter__(self) -> "ConfigServiceV2Client": return self @@ -2846,6 +3947,158 @@ def __exit__(self, type, value, traceback): """ self.transport.close() + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
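``copy_log_entries`` has no flattened fields, so the request object is constructed directly; like the link methods, it returns an operation future. A sketch with a hypothetical log bucket and Cloud Storage destination:

.. code-block:: python

    from google.cloud import logging_v2

    client = logging_v2.ConfigServiceV2Client()

    request = logging_v2.CopyLogEntriesRequest(
        name="projects/my-project/locations/global/buckets/my-bucket",
        # Entries are copied into this Cloud Storage bucket.
        destination="storage.googleapis.com/my-gcs-bucket",
    )
    operation = client.copy_log_entries(request=request)

    # Block until the copy completes, then report how much was copied.
    response = operation.result()
    print(response.log_entries_copied_count)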
+ if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
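+        # A successful cancel returns ``None``; failures surface as
+        # exceptions raised by the wrapped callable.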
+ if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + diff --git a/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/pagers.py b/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/pagers.py index fb524efd8b..444519d150 100755 --- a/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/pagers.py +++ b/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/pagers.py @@ -381,6 +381,127 @@ def __repr__(self) -> str: return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) +class ListLinksPager: + """A pager for iterating through ``list_links`` requests. + + This class thinly wraps an initial + :class:`google.cloud.logging_v2.types.ListLinksResponse` object, and + provides an ``__iter__`` method to iterate through its + ``links`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListLinks`` requests and continue to iterate + through the ``links`` field on the + corresponding responses. + + All the usual :class:`google.cloud.logging_v2.types.ListLinksResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., logging_config.ListLinksResponse], + request: logging_config.ListLinksRequest, + response: logging_config.ListLinksResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.logging_v2.types.ListLinksRequest): + The initial request object. + response (google.cloud.logging_v2.types.ListLinksResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = logging_config.ListLinksRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[logging_config.ListLinksResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[logging_config.Link]: + for page in self.pages: + yield from page.links + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListLinksAsyncPager: + """A pager for iterating through ``list_links`` requests. + + This class thinly wraps an initial + :class:`google.cloud.logging_v2.types.ListLinksResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``links`` field. 
+ + If there are more pages, the ``__aiter__`` method will make additional + ``ListLinks`` requests and continue to iterate + through the ``links`` field on the + corresponding responses. + + All the usual :class:`google.cloud.logging_v2.types.ListLinksResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[logging_config.ListLinksResponse]], + request: logging_config.ListLinksRequest, + response: logging_config.ListLinksResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.logging_v2.types.ListLinksRequest): + The initial request object. + response (google.cloud.logging_v2.types.ListLinksResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = logging_config.ListLinksRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[logging_config.ListLinksResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[logging_config.Link]: + async def async_generator(): + async for page in self.pages: + for response in page.links: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + class ListExclusionsPager: """A pager for iterating through ``list_exclusions`` requests. diff --git a/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/__init__.py b/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/__init__.py index 8aaccee9e9..1ba655878d 100755 --- a/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/__init__.py +++ b/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/__init__.py @@ -19,20 +19,15 @@ from .base import ConfigServiceV2Transport from .grpc import ConfigServiceV2GrpcTransport from .grpc_asyncio import ConfigServiceV2GrpcAsyncIOTransport -from .rest import ConfigServiceV2RestTransport -from .rest import ConfigServiceV2RestInterceptor # Compile a registry of transports.
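# The registry below maps the ``transport`` argument accepted by the
# client to a concrete transport class; with the REST transport removed,
# only the two gRPC variants remain. For example (illustrative sketch,
# not generator output):
#
#     transport_cls = _transport_registry['grpc']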
_transport_registry = OrderedDict() # type: Dict[str, Type[ConfigServiceV2Transport]] _transport_registry['grpc'] = ConfigServiceV2GrpcTransport _transport_registry['grpc_asyncio'] = ConfigServiceV2GrpcAsyncIOTransport -_transport_registry['rest'] = ConfigServiceV2RestTransport __all__ = ( 'ConfigServiceV2Transport', 'ConfigServiceV2GrpcTransport', 'ConfigServiceV2GrpcAsyncIOTransport', - 'ConfigServiceV2RestTransport', - 'ConfigServiceV2RestInterceptor', ) diff --git a/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py b/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py index e30689cd03..3b1f9f96fc 100755 --- a/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py +++ b/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py @@ -23,10 +23,12 @@ from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 from google.api_core import retry as retries +from google.api_core import operations_v1 from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore from google.cloud.logging_v2.types import logging_config +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) @@ -127,6 +129,16 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.create_bucket_async: gapic_v1.method.wrap_method( + self.create_bucket_async, + default_timeout=None, + client_info=client_info, + ), + self.update_bucket_async: gapic_v1.method.wrap_method( + self.update_bucket_async, + default_timeout=None, + client_info=client_info, + ), self.create_bucket: gapic_v1.method.wrap_method( self.create_bucket, default_timeout=None, @@ -229,6 +241,26 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), + self.create_link: gapic_v1.method.wrap_method( + self.create_link, + default_timeout=None, + client_info=client_info, + ), + self.delete_link: gapic_v1.method.wrap_method( + self.delete_link, + default_timeout=None, + client_info=client_info, + ), + self.list_links: gapic_v1.method.wrap_method( + self.list_links, + default_timeout=None, + client_info=client_info, + ), + self.get_link: gapic_v1.method.wrap_method( + self.get_link, + default_timeout=None, + client_info=client_info, + ), self.list_exclusions: gapic_v1.method.wrap_method( self.list_exclusions, default_retry=retries.Retry( @@ -288,6 +320,21 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.get_settings: gapic_v1.method.wrap_method( + self.get_settings, + default_timeout=None, + client_info=client_info, + ), + self.update_settings: gapic_v1.method.wrap_method( + self.update_settings, + default_timeout=None, + client_info=client_info, + ), + self.copy_log_entries: gapic_v1.method.wrap_method( + self.copy_log_entries, + default_timeout=None, + client_info=client_info, + ), } def close(self): @@ -299,6 +346,11 @@ def close(self): """ raise NotImplementedError() + @property + def operations_client(self): + """Return the client designed to process long-running operations.""" + raise NotImplementedError() + @property def list_buckets(self) -> Callable[ 
[logging_config.ListBucketsRequest], @@ -317,6 +369,24 @@ def get_bucket(self) -> Callable[ ]]: raise NotImplementedError() + @property + def create_bucket_async(self) -> Callable[ + [logging_config.CreateBucketRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def update_bucket_async(self) -> Callable[ + [logging_config.UpdateBucketRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + @property def create_bucket(self) -> Callable[ [logging_config.CreateBucketRequest], @@ -443,6 +513,42 @@ def delete_sink(self) -> Callable[ ]]: raise NotImplementedError() + @property + def create_link(self) -> Callable[ + [logging_config.CreateLinkRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def delete_link(self) -> Callable[ + [logging_config.DeleteLinkRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def list_links(self) -> Callable[ + [logging_config.ListLinksRequest], + Union[ + logging_config.ListLinksResponse, + Awaitable[logging_config.ListLinksResponse] + ]]: + raise NotImplementedError() + + @property + def get_link(self) -> Callable[ + [logging_config.GetLinkRequest], + Union[ + logging_config.Link, + Awaitable[logging_config.Link] + ]]: + raise NotImplementedError() + @property def list_exclusions(self) -> Callable[ [logging_config.ListExclusionsRequest], @@ -506,6 +612,60 @@ def update_cmek_settings(self) -> Callable[ ]]: raise NotImplementedError() + @property + def get_settings(self) -> Callable[ + [logging_config.GetSettingsRequest], + Union[ + logging_config.Settings, + Awaitable[logging_config.Settings] + ]]: + raise NotImplementedError() + + @property + def update_settings(self) -> Callable[ + [logging_config.UpdateSettingsRequest], + Union[ + logging_config.Settings, + Awaitable[logging_config.Settings] + ]]: + raise NotImplementedError() + + @property + def copy_log_entries(self) -> Callable[ + [logging_config.CopyLogEntriesRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[operations_pb2.ListOperationsResponse, Awaitable[operations_pb2.ListOperationsResponse]], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> Callable[ + [operations_pb2.CancelOperationRequest], + None, + ]: + raise NotImplementedError() + @property def kind(self) -> str: raise NotImplementedError() diff --git a/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py b/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py index 8a58c15f81..fc73445930 100755 --- a/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py +++ b/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py @@ -17,6 +17,7 @@ from typing import Callable, Dict, Optional, Sequence, Tuple, Union from 
google.api_core import grpc_helpers +from google.api_core import operations_v1 from google.api_core import gapic_v1 import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore @@ -25,6 +26,7 @@ import grpc # type: ignore from google.cloud.logging_v2.types import logging_config +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from .base import ConfigServiceV2Transport, DEFAULT_CLIENT_INFO @@ -109,6 +111,7 @@ def __init__(self, *, self._grpc_channel = None self._ssl_channel_credentials = ssl_channel_credentials self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsClient] = None if api_mtls_endpoint: warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) @@ -226,13 +229,29 @@ def grpc_channel(self) -> grpc.Channel: """ return self._grpc_channel + @property + def operations_client(self) -> operations_v1.OperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsClient( + self.grpc_channel + ) + + # Return the client from cache. + return self._operations_client + @property def list_buckets(self) -> Callable[ [logging_config.ListBucketsRequest], logging_config.ListBucketsResponse]: r"""Return a callable for the list buckets method over gRPC. - Lists buckets. + Lists log buckets. Returns: Callable[[~.ListBucketsRequest], @@ -258,7 +277,7 @@ def get_bucket(self) -> Callable[ logging_config.LogBucket]: r"""Return a callable for the get bucket method over gRPC. - Gets a bucket. + Gets a log bucket. Returns: Callable[[~.GetBucketRequest], @@ -278,15 +297,76 @@ def get_bucket(self) -> Callable[ ) return self._stubs['get_bucket'] + @property + def create_bucket_async(self) -> Callable[ + [logging_config.CreateBucketRequest], + operations_pb2.Operation]: + r"""Return a callable for the create bucket async method over gRPC. + + Creates a log bucket asynchronously that can be used + to store log entries. + After a bucket has been created, the bucket's location + cannot be changed. + + Returns: + Callable[[~.CreateBucketRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_bucket_async' not in self._stubs: + self._stubs['create_bucket_async'] = self.grpc_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/CreateBucketAsync', + request_serializer=logging_config.CreateBucketRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_bucket_async'] + + @property + def update_bucket_async(self) -> Callable[ + [logging_config.UpdateBucketRequest], + operations_pb2.Operation]: + r"""Return a callable for the update bucket async method over gRPC. + + Updates a log bucket asynchronously. + + If the bucket has a ``lifecycle_state`` of ``DELETE_REQUESTED``, + then ``FAILED_PRECONDITION`` will be returned. + + After a bucket has been created, the bucket's location cannot be + changed. 
+ + Returns: + Callable[[~.UpdateBucketRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_bucket_async' not in self._stubs: + self._stubs['update_bucket_async'] = self.grpc_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/UpdateBucketAsync', + request_serializer=logging_config.UpdateBucketRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['update_bucket_async'] + @property def create_bucket(self) -> Callable[ [logging_config.CreateBucketRequest], logging_config.LogBucket]: r"""Return a callable for the create bucket method over gRPC. - Creates a bucket that can be used to store log - entries. Once a bucket has been created, the region - cannot be changed. + Creates a log bucket that can be used to store log + entries. After a bucket has been created, the bucket's + location cannot be changed. Returns: Callable[[~.CreateBucketRequest], @@ -312,17 +392,13 @@ def update_bucket(self) -> Callable[ logging_config.LogBucket]: r"""Return a callable for the update bucket method over gRPC. - Updates a bucket. This method replaces the following fields in - the existing bucket with values from the new bucket: - ``retention_period`` + Updates a log bucket. - If the retention period is decreased and the bucket is locked, - FAILED_PRECONDITION will be returned. + If the bucket has a ``lifecycle_state`` of ``DELETE_REQUESTED``, + then ``FAILED_PRECONDITION`` will be returned. - If the bucket has a LifecycleState of DELETE_REQUESTED, - FAILED_PRECONDITION will be returned. - - A buckets region may not be modified after it is created. + After a bucket has been created, the bucket's location cannot be + changed. Returns: Callable[[~.UpdateBucketRequest], @@ -348,9 +424,12 @@ def delete_bucket(self) -> Callable[ empty_pb2.Empty]: r"""Return a callable for the delete bucket method over gRPC. - Deletes a bucket. Moves the bucket to the DELETE_REQUESTED - state. After 7 days, the bucket will be purged and all logs in - the bucket will be permanently deleted. + Deletes a log bucket. + + Changes the bucket's ``lifecycle_state`` to the + ``DELETE_REQUESTED`` state. After 7 days, the bucket will be + purged and all log entries in the bucket will be permanently + deleted. Returns: Callable[[~.DeleteBucketRequest], @@ -376,8 +455,9 @@ def undelete_bucket(self) -> Callable[ empty_pb2.Empty]: r"""Return a callable for the undelete bucket method over gRPC. - Undeletes a bucket. A bucket that has been deleted - may be undeleted within the grace period of 7 days. + Undeletes a log bucket. A bucket that has been + deleted can be undeleted within the grace period of 7 + days. Returns: Callable[[~.UndeleteBucketRequest], @@ -403,7 +483,7 @@ def list_views(self) -> Callable[ logging_config.ListViewsResponse]: r"""Return a callable for the list views method over gRPC. - Lists views on a bucket. + Lists views on a log bucket. Returns: Callable[[~.ListViewsRequest], @@ -429,7 +509,7 @@ def get_view(self) -> Callable[ logging_config.LogView]: r"""Return a callable for the get view method over gRPC. - Gets a view. + Gets a view on a log bucket.
Returns: Callable[[~.GetViewRequest], @@ -455,8 +535,8 @@ def create_view(self) -> Callable[ logging_config.LogView]: r"""Return a callable for the create view method over gRPC. - Creates a view over logs in a bucket. A bucket may - contain a maximum of 50 views. + Creates a view over log entries in a log bucket. A + bucket may contain a maximum of 30 views. Returns: Callable[[~.CreateViewRequest], @@ -482,8 +562,11 @@ def update_view(self) -> Callable[ logging_config.LogView]: r"""Return a callable for the update view method over gRPC. - Updates a view. This method replaces the following fields in the - existing view with values from the new view: ``filter``. + Updates a view on a log bucket. This method replaces the + following fields in the existing view with values from the new + view: ``filter``. If an ``UNAVAILABLE`` error is returned, this + indicates that the system is not in a state where it can update the + view. If this occurs, please try again in a few minutes. Returns: Callable[[~.UpdateViewRequest], @@ -509,7 +592,10 @@ def delete_view(self) -> Callable[ empty_pb2.Empty]: r"""Return a callable for the delete view method over gRPC. - Deletes a view from a bucket. + Deletes a view on a log bucket. If an ``UNAVAILABLE`` error is + returned, this indicates that the system is not in a state where it + can delete the view. If this occurs, please try again in a few + minutes. Returns: Callable[[~.DeleteViewRequest], @@ -669,13 +755,122 @@ def delete_sink(self) -> Callable[ ) return self._stubs['delete_sink'] + @property + def create_link(self) -> Callable[ + [logging_config.CreateLinkRequest], + operations_pb2.Operation]: + r"""Return a callable for the create link method over gRPC. + + Asynchronously creates a linked dataset in BigQuery + which makes it possible to use BigQuery to read the logs + stored in the log bucket. A log bucket may currently + only contain one link. + + Returns: + Callable[[~.CreateLinkRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_link' not in self._stubs: + self._stubs['create_link'] = self.grpc_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/CreateLink', + request_serializer=logging_config.CreateLinkRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_link'] + + @property + def delete_link(self) -> Callable[ + [logging_config.DeleteLinkRequest], + operations_pb2.Operation]: + r"""Return a callable for the delete link method over gRPC. + + Deletes a link. This will also delete the + corresponding BigQuery linked dataset. + + Returns: + Callable[[~.DeleteLinkRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each.
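+        # The callable is created once per transport and memoized in
+        # ``self._stubs``; later reads of this property reuse it.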
+ if 'delete_link' not in self._stubs: + self._stubs['delete_link'] = self.grpc_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/DeleteLink', + request_serializer=logging_config.DeleteLinkRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['delete_link'] + + @property + def list_links(self) -> Callable[ + [logging_config.ListLinksRequest], + logging_config.ListLinksResponse]: + r"""Return a callable for the list links method over gRPC. + + Lists links. + + Returns: + Callable[[~.ListLinksRequest], + ~.ListLinksResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_links' not in self._stubs: + self._stubs['list_links'] = self.grpc_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/ListLinks', + request_serializer=logging_config.ListLinksRequest.serialize, + response_deserializer=logging_config.ListLinksResponse.deserialize, + ) + return self._stubs['list_links'] + + @property + def get_link(self) -> Callable[ + [logging_config.GetLinkRequest], + logging_config.Link]: + r"""Return a callable for the get link method over gRPC. + + Gets a link. + + Returns: + Callable[[~.GetLinkRequest], + ~.Link]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_link' not in self._stubs: + self._stubs['get_link'] = self.grpc_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/GetLink', + request_serializer=logging_config.GetLinkRequest.serialize, + response_deserializer=logging_config.Link.deserialize, + ) + return self._stubs['get_link'] + @property def list_exclusions(self) -> Callable[ [logging_config.ListExclusionsRequest], logging_config.ListExclusionsResponse]: r"""Return a callable for the list exclusions method over gRPC. - Lists all the exclusions in a parent resource. + Lists all the exclusions on the \_Default sink in a parent + resource. Returns: Callable[[~.ListExclusionsRequest], @@ -701,7 +896,7 @@ def get_exclusion(self) -> Callable[ logging_config.LogExclusion]: r"""Return a callable for the get exclusion method over gRPC. - Gets the description of an exclusion. + Gets the description of an exclusion in the \_Default sink. Returns: Callable[[~.GetExclusionRequest], @@ -727,10 +922,9 @@ def create_exclusion(self) -> Callable[ logging_config.LogExclusion]: r"""Return a callable for the create exclusion method over gRPC. - Creates a new exclusion in a specified parent - resource. Only log entries belonging to that resource - can be excluded. You can have up to 10 exclusions in a - resource. + Creates a new exclusion in the \_Default sink in a specified + parent resource. Only log entries belonging to that resource can + be excluded. You can have up to 10 exclusions in a resource. Returns: Callable[[~.CreateExclusionRequest], @@ -756,8 +950,8 @@ def update_exclusion(self) -> Callable[ logging_config.LogExclusion]: r"""Return a callable for the update exclusion method over gRPC. - Changes one or more properties of an existing - exclusion. + Changes one or more properties of an existing exclusion in the + \_Default sink. 
Returns: Callable[[~.UpdateExclusionRequest], @@ -783,7 +977,7 @@ def delete_exclusion(self) -> Callable[ empty_pb2.Empty]: r"""Return a callable for the delete exclusion method over gRPC. - Deletes an exclusion. + Deletes an exclusion in the \_Default sink. Returns: Callable[[~.DeleteExclusionRequest], @@ -809,13 +1003,14 @@ def get_cmek_settings(self) -> Callable[ logging_config.CmekSettings]: r"""Return a callable for the get cmek settings method over gRPC. - Gets the Logs Router CMEK settings for the given resource. + Gets the Logging CMEK settings for the given resource. - Note: CMEK for the Logs Router can currently only be configured - for GCP organizations. Once configured, it applies to all - projects and folders in the GCP organization. + Note: CMEK for the Log Router can be configured for Google Cloud + projects, folders, organizations and billing accounts. Once + configured for an organization, it applies to all projects and + folders in the Google Cloud organization. - See `Enabling CMEK for Logs + See `Enabling CMEK for Log Router <https://cloud.google.com/logging/docs/routing/managed-encryption>`__ for more information. @@ -843,11 +1038,11 @@ def update_cmek_settings(self) -> Callable[ logging_config.CmekSettings]: r"""Return a callable for the update cmek settings method over gRPC. - Updates the Logs Router CMEK settings for the given resource. + Updates the Log Router CMEK settings for the given resource. - Note: CMEK for the Logs Router can currently only be configured - for GCP organizations. Once configured, it applies to all - projects and folders in the GCP organization. + Note: CMEK for the Log Router can currently only be configured + for Google Cloud organizations. Once configured, it applies to + all projects and folders in the Google Cloud organization. [UpdateCmekSettings][google.logging.v2.ConfigServiceV2.UpdateCmekSettings] will fail if 1) ``kms_key_name`` is invalid, or 2) the @@ -855,7 +1050,7 @@ def update_cmek_settings(self) -> Callable[ ``roles/cloudkms.cryptoKeyEncrypterDecrypter`` role assigned for the key, or 3) access to the key is disabled. - See `Enabling CMEK for Logs + See `Enabling CMEK for Log Router <https://cloud.google.com/logging/docs/routing/managed-encryption>`__ for more information. @@ -877,9 +1072,169 @@ def update_cmek_settings(self) -> Callable[ ) return self._stubs['update_cmek_settings'] + @property + def get_settings(self) -> Callable[ + [logging_config.GetSettingsRequest], + logging_config.Settings]: + r"""Return a callable for the get settings method over gRPC. + + Gets the Log Router settings for the given resource. + + Note: Settings for the Log Router can be retrieved for Google Cloud + projects, folders, organizations and billing accounts. Currently + it can only be configured for organizations. Once configured for + an organization, it applies to all projects and folders in the + Google Cloud organization. + + See `Enabling CMEK for Log + Router <https://cloud.google.com/logging/docs/routing/managed-encryption>`__ + for more information. + + Returns: + Callable[[~.GetSettingsRequest], + ~.Settings]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each.
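+        # ``serialize``/``deserialize`` are the proto-plus hooks that convert
+        # the wrapper types to and from the raw protobuf wire format.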
+ if 'get_settings' not in self._stubs: + self._stubs['get_settings'] = self.grpc_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/GetSettings', + request_serializer=logging_config.GetSettingsRequest.serialize, + response_deserializer=logging_config.Settings.deserialize, + ) + return self._stubs['get_settings'] + + @property + def update_settings(self) -> Callable[ + [logging_config.UpdateSettingsRequest], + logging_config.Settings]: + r"""Return a callable for the update settings method over gRPC. + + Updates the Log Router settings for the given resource. + + Note: Settings for the Log Router can currently only be + configured for Google Cloud organizations. Once configured, it + applies to all projects and folders in the Google Cloud + organization. + + [UpdateSettings][google.logging.v2.ConfigServiceV2.UpdateSettings] + will fail if 1) ``kms_key_name`` is invalid, or 2) the + associated service account does not have the required + ``roles/cloudkms.cryptoKeyEncrypterDecrypter`` role assigned for + the key, or 3) access to the key is disabled. 4) ``location_id`` + is not supported by Logging. 5) ``location_id`` violates + OrgPolicy. + + See `Enabling CMEK for Log + Router <https://cloud.google.com/logging/docs/routing/managed-encryption>`__ + for more information. + + Returns: + Callable[[~.UpdateSettingsRequest], + ~.Settings]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_settings' not in self._stubs: + self._stubs['update_settings'] = self.grpc_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/UpdateSettings', + request_serializer=logging_config.UpdateSettingsRequest.serialize, + response_deserializer=logging_config.Settings.deserialize, + ) + return self._stubs['update_settings'] + + @property + def copy_log_entries(self) -> Callable[ + [logging_config.CopyLogEntriesRequest], + operations_pb2.Operation]: + r"""Return a callable for the copy log entries method over gRPC. + + Copies a set of log entries from a log bucket to a + Cloud Storage bucket. + + Returns: + Callable[[~.CopyLogEntriesRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'copy_log_entries' not in self._stubs: + self._stubs['copy_log_entries'] = self.grpc_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/CopyLogEntries', + request_serializer=logging_config.CopyLogEntriesRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['copy_log_entries'] + def close(self): self.grpc_channel.close() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each.
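+        # CancelOperation returns google.protobuf.Empty, so no response
+        # deserializer is registered below.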
+ if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: + r"""Return a callable for the list_operations method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + @property def kind(self) -> str: return "grpc" diff --git a/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py b/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py index 6e208e9a75..e68ff98164 100755 --- a/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py +++ b/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py @@ -18,6 +18,7 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers_async +from google.api_core import operations_v1 from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore @@ -25,6 +26,7 @@ from grpc.experimental import aio # type: ignore from google.cloud.logging_v2.types import logging_config +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from .base import ConfigServiceV2Transport, DEFAULT_CLIENT_INFO from .grpc import ConfigServiceV2GrpcTransport @@ -154,6 +156,7 @@ def __init__(self, *, self._grpc_channel = None self._ssl_channel_credentials = ssl_channel_credentials self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None if api_mtls_endpoint: warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) @@ -229,13 +232,29 @@ def grpc_channel(self) -> aio.Channel: # Return the channel from cache. 
return self._grpc_channel + @property + def operations_client(self) -> operations_v1.OperationsAsyncClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsAsyncClient( + self.grpc_channel + ) + + # Return the client from cache. + return self._operations_client + @property def list_buckets(self) -> Callable[ [logging_config.ListBucketsRequest], Awaitable[logging_config.ListBucketsResponse]]: r"""Return a callable for the list buckets method over gRPC. - Lists buckets. + Lists log buckets. Returns: Callable[[~.ListBucketsRequest], @@ -261,7 +280,7 @@ def get_bucket(self) -> Callable[ Awaitable[logging_config.LogBucket]]: r"""Return a callable for the get bucket method over gRPC. - Gets a bucket. + Gets a log bucket. Returns: Callable[[~.GetBucketRequest], @@ -281,15 +300,76 @@ def get_bucket(self) -> Callable[ ) return self._stubs['get_bucket'] + @property + def create_bucket_async(self) -> Callable[ + [logging_config.CreateBucketRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the create bucket async method over gRPC. + + Creates a log bucket asynchronously that can be used + to store log entries. + After a bucket has been created, the bucket's location + cannot be changed. + + Returns: + Callable[[~.CreateBucketRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_bucket_async' not in self._stubs: + self._stubs['create_bucket_async'] = self.grpc_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/CreateBucketAsync', + request_serializer=logging_config.CreateBucketRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_bucket_async'] + + @property + def update_bucket_async(self) -> Callable[ + [logging_config.UpdateBucketRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the update bucket async method over gRPC. + + Updates a log bucket asynchronously. + + If the bucket has a ``lifecycle_state`` of ``DELETE_REQUESTED``, + then ``FAILED_PRECONDITION`` will be returned. + + After a bucket has been created, the bucket's location cannot be + changed. + + Returns: + Callable[[~.UpdateBucketRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_bucket_async' not in self._stubs: + self._stubs['update_bucket_async'] = self.grpc_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/UpdateBucketAsync', + request_serializer=logging_config.UpdateBucketRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['update_bucket_async'] + @property def create_bucket(self) -> Callable[ [logging_config.CreateBucketRequest], Awaitable[logging_config.LogBucket]]: r"""Return a callable for the create bucket method over gRPC. 
- Creates a bucket that can be used to store log - entries. Once a bucket has been created, the region - cannot be changed. + Creates a log bucket that can be used to store log + entries. After a bucket has been created, the bucket's + location cannot be changed. Returns: Callable[[~.CreateBucketRequest], @@ -315,17 +395,13 @@ def update_bucket(self) -> Callable[ Awaitable[logging_config.LogBucket]]: r"""Return a callable for the update bucket method over gRPC. - Updates a bucket. This method replaces the following fields in - the existing bucket with values from the new bucket: - ``retention_period`` + Updates a log bucket. - If the retention period is decreased and the bucket is locked, - FAILED_PRECONDITION will be returned. + If the bucket has a ``lifecycle_state`` of ``DELETE_REQUESTED``, + then ``FAILED_PRECONDITION`` will be returned. - If the bucket has a LifecycleState of DELETE_REQUESTED, - FAILED_PRECONDITION will be returned. - - A buckets region may not be modified after it is created. + After a bucket has been created, the bucket's location cannot be + changed. Returns: Callable[[~.UpdateBucketRequest], @@ -351,9 +427,12 @@ def delete_bucket(self) -> Callable[ Awaitable[empty_pb2.Empty]]: r"""Return a callable for the delete bucket method over gRPC. - Deletes a bucket. Moves the bucket to the DELETE_REQUESTED - state. After 7 days, the bucket will be purged and all logs in - the bucket will be permanently deleted. + Deletes a log bucket. + + Changes the bucket's ``lifecycle_state`` to the + ``DELETE_REQUESTED`` state. After 7 days, the bucket will be + purged and all log entries in the bucket will be permanently + deleted. Returns: Callable[[~.DeleteBucketRequest], @@ -379,8 +458,9 @@ def undelete_bucket(self) -> Callable[ Awaitable[empty_pb2.Empty]]: r"""Return a callable for the undelete bucket method over gRPC. - Undeletes a bucket. A bucket that has been deleted - may be undeleted within the grace period of 7 days. + Undeletes a log bucket. A bucket that has been + deleted can be undeleted within the grace period of 7 + days. Returns: Callable[[~.UndeleteBucketRequest], @@ -406,7 +486,7 @@ def list_views(self) -> Callable[ Awaitable[logging_config.ListViewsResponse]]: r"""Return a callable for the list views method over gRPC. - Lists views on a bucket. + Lists views on a log bucket. Returns: Callable[[~.ListViewsRequest], @@ -432,7 +512,7 @@ def get_view(self) -> Callable[ Awaitable[logging_config.LogView]]: r"""Return a callable for the get view method over gRPC. - Gets a view. + Gets a view on a log bucket. Returns: Callable[[~.GetViewRequest], @@ -458,8 +538,8 @@ def create_view(self) -> Callable[ Awaitable[logging_config.LogView]]: r"""Return a callable for the create view method over gRPC. - Creates a view over logs in a bucket. A bucket may - contain a maximum of 50 views. + Creates a view over log entries in a log bucket. A + bucket may contain a maximum of 30 views. Returns: Callable[[~.CreateViewRequest], @@ -485,8 +565,11 @@ def update_view(self) -> Callable[ Awaitable[logging_config.LogView]]: r"""Return a callable for the update view method over gRPC. - Updates a view. This method replaces the following fields in the - existing view with values from the new view: ``filter``. + Updates a view on a log bucket. This method replaces the + following fields in the existing view with values from the new + view: ``filter``. If an ``UNAVAILABLE`` error is returned, this + indicates that the system is not in a state where it can update the + view.
If this occurs, please try again in a few minutes. Returns: Callable[[~.UpdateViewRequest], @@ -512,7 +595,10 @@ def delete_view(self) -> Callable[ Awaitable[empty_pb2.Empty]]: r"""Return a callable for the delete view method over gRPC. - Deletes a view from a bucket. + Deletes a view on a log bucket. If an ``UNAVAILABLE`` error is + returned, this indicates that the system is not in a state where it + can delete the view. If this occurs, please try again in a few + minutes. Returns: Callable[[~.DeleteViewRequest], @@ -672,13 +758,122 @@ def delete_sink(self) -> Callable[ ) return self._stubs['delete_sink'] + @property + def create_link(self) -> Callable[ + [logging_config.CreateLinkRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the create link method over gRPC. + + Asynchronously creates a linked dataset in BigQuery + which makes it possible to use BigQuery to read the logs + stored in the log bucket. A log bucket may currently + only contain one link. + + Returns: + Callable[[~.CreateLinkRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_link' not in self._stubs: + self._stubs['create_link'] = self.grpc_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/CreateLink', + request_serializer=logging_config.CreateLinkRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_link'] + + @property + def delete_link(self) -> Callable[ + [logging_config.DeleteLinkRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the delete link method over gRPC. + + Deletes a link. This will also delete the + corresponding BigQuery linked dataset. + + Returns: + Callable[[~.DeleteLinkRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_link' not in self._stubs: + self._stubs['delete_link'] = self.grpc_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/DeleteLink', + request_serializer=logging_config.DeleteLinkRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['delete_link'] + + @property + def list_links(self) -> Callable[ + [logging_config.ListLinksRequest], + Awaitable[logging_config.ListLinksResponse]]: + r"""Return a callable for the list links method over gRPC. + + Lists links. + + Returns: + Callable[[~.ListLinksRequest], + Awaitable[~.ListLinksResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each.
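+        # On the asyncio channel, invoking the returned callable produces an
+        # awaitable call object; otherwise this mirrors the synchronous stub.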
+ if 'list_links' not in self._stubs: + self._stubs['list_links'] = self.grpc_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/ListLinks', + request_serializer=logging_config.ListLinksRequest.serialize, + response_deserializer=logging_config.ListLinksResponse.deserialize, + ) + return self._stubs['list_links'] + + @property + def get_link(self) -> Callable[ + [logging_config.GetLinkRequest], + Awaitable[logging_config.Link]]: + r"""Return a callable for the get link method over gRPC. + + Gets a link. + + Returns: + Callable[[~.GetLinkRequest], + Awaitable[~.Link]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_link' not in self._stubs: + self._stubs['get_link'] = self.grpc_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/GetLink', + request_serializer=logging_config.GetLinkRequest.serialize, + response_deserializer=logging_config.Link.deserialize, + ) + return self._stubs['get_link'] + @property def list_exclusions(self) -> Callable[ [logging_config.ListExclusionsRequest], Awaitable[logging_config.ListExclusionsResponse]]: r"""Return a callable for the list exclusions method over gRPC. - Lists all the exclusions in a parent resource. + Lists all the exclusions on the \_Default sink in a parent + resource. Returns: Callable[[~.ListExclusionsRequest], @@ -704,7 +899,7 @@ def get_exclusion(self) -> Callable[ Awaitable[logging_config.LogExclusion]]: r"""Return a callable for the get exclusion method over gRPC. - Gets the description of an exclusion. + Gets the description of an exclusion in the \_Default sink. Returns: Callable[[~.GetExclusionRequest], @@ -730,10 +925,9 @@ def create_exclusion(self) -> Callable[ Awaitable[logging_config.LogExclusion]]: r"""Return a callable for the create exclusion method over gRPC. - Creates a new exclusion in a specified parent - resource. Only log entries belonging to that resource - can be excluded. You can have up to 10 exclusions in a - resource. + Creates a new exclusion in the \_Default sink in a specified + parent resource. Only log entries belonging to that resource can + be excluded. You can have up to 10 exclusions in a resource. Returns: Callable[[~.CreateExclusionRequest], @@ -759,8 +953,8 @@ def update_exclusion(self) -> Callable[ Awaitable[logging_config.LogExclusion]]: r"""Return a callable for the update exclusion method over gRPC. - Changes one or more properties of an existing - exclusion. + Changes one or more properties of an existing exclusion in the + \_Default sink. Returns: Callable[[~.UpdateExclusionRequest], @@ -786,7 +980,7 @@ def delete_exclusion(self) -> Callable[ Awaitable[empty_pb2.Empty]]: r"""Return a callable for the delete exclusion method over gRPC. - Deletes an exclusion. + Deletes an exclusion in the \_Default sink. Returns: Callable[[~.DeleteExclusionRequest], @@ -812,13 +1006,14 @@ def get_cmek_settings(self) -> Callable[ Awaitable[logging_config.CmekSettings]]: r"""Return a callable for the get cmek settings method over gRPC. - Gets the Logs Router CMEK settings for the given resource. + Gets the Logging CMEK settings for the given resource. - Note: CMEK for the Logs Router can currently only be configured - for GCP organizations. Once configured, it applies to all - projects and folders in the GCP organization. 
+ Note: CMEK for the Log Router can be configured for Google Cloud + projects, folders, organizations and billing accounts. Once + configured for an organization, it applies to all projects and + folders in the Google Cloud organization. - See `Enabling CMEK for Logs + See `Enabling CMEK for Log Router <https://cloud.google.com/logging/docs/routing/managed-encryption>`__ for more information. @@ -846,11 +1041,11 @@ def update_cmek_settings(self) -> Callable[ Awaitable[logging_config.CmekSettings]]: r"""Return a callable for the update cmek settings method over gRPC. - Updates the Logs Router CMEK settings for the given resource. + Updates the Log Router CMEK settings for the given resource. - Note: CMEK for the Logs Router can currently only be configured - for GCP organizations. Once configured, it applies to all - projects and folders in the GCP organization. + Note: CMEK for the Log Router can currently only be configured + for Google Cloud organizations. Once configured, it applies to + all projects and folders in the Google Cloud organization. [UpdateCmekSettings][google.logging.v2.ConfigServiceV2.UpdateCmekSettings] will fail if 1) ``kms_key_name`` is invalid, or 2) the @@ -858,7 +1053,7 @@ def update_cmek_settings(self) -> Callable[ ``roles/cloudkms.cryptoKeyEncrypterDecrypter`` role assigned for the key, or 3) access to the key is disabled. - See `Enabling CMEK for Logs + See `Enabling CMEK for Log Router <https://cloud.google.com/logging/docs/routing/managed-encryption>`__ for more information. @@ -880,9 +1075,169 @@ def update_cmek_settings(self) -> Callable[ ) return self._stubs['update_cmek_settings'] + @property + def get_settings(self) -> Callable[ + [logging_config.GetSettingsRequest], + Awaitable[logging_config.Settings]]: + r"""Return a callable for the get settings method over gRPC. + + Gets the Log Router settings for the given resource. + + Note: Settings for the Log Router can be retrieved for Google Cloud + projects, folders, organizations and billing accounts. Currently + it can only be configured for organizations. Once configured for + an organization, it applies to all projects and folders in the + Google Cloud organization. + + See `Enabling CMEK for Log + Router <https://cloud.google.com/logging/docs/routing/managed-encryption>`__ + for more information. + + Returns: + Callable[[~.GetSettingsRequest], + Awaitable[~.Settings]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_settings' not in self._stubs: + self._stubs['get_settings'] = self.grpc_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/GetSettings', + request_serializer=logging_config.GetSettingsRequest.serialize, + response_deserializer=logging_config.Settings.deserialize, + ) + return self._stubs['get_settings'] + + @property + def update_settings(self) -> Callable[ + [logging_config.UpdateSettingsRequest], + Awaitable[logging_config.Settings]]: + r"""Return a callable for the update settings method over gRPC. + + Updates the Log Router settings for the given resource. + + Note: Settings for the Log Router can currently only be + configured for Google Cloud organizations. Once configured, it + applies to all projects and folders in the Google Cloud + organization. + + [UpdateSettings][google.logging.v2.ConfigServiceV2.UpdateSettings] + will fail if 1) ``kms_key_name`` is invalid, or 2) the + associated service account does not have the required + ``roles/cloudkms.cryptoKeyEncrypterDecrypter`` role assigned for + the key, or 3) access to the key is disabled.
4) ``location_id`` + is not supported by Logging, or 5) ``location_id`` violates + OrgPolicy. + + See `Enabling CMEK for Log + Router <https://cloud.google.com/logging/docs/routing/managed-encryption>`__ + for more information. + + Returns: + Callable[[~.UpdateSettingsRequest], + Awaitable[~.Settings]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_settings' not in self._stubs: + self._stubs['update_settings'] = self.grpc_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/UpdateSettings', + request_serializer=logging_config.UpdateSettingsRequest.serialize, + response_deserializer=logging_config.Settings.deserialize, + ) + return self._stubs['update_settings'] + + @property + def copy_log_entries(self) -> Callable[ + [logging_config.CopyLogEntriesRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the copy log entries method over gRPC. + + Copies a set of log entries from a log bucket to a + Cloud Storage bucket. + + Returns: + Callable[[~.CopyLogEntriesRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'copy_log_entries' not in self._stubs: + self._stubs['copy_log_entries'] = self.grpc_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/CopyLogEntries', + request_serializer=logging_config.CopyLogEntriesRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['copy_log_entries'] + def close(self): return self.grpc_channel.close() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: + r"""Return a callable for the list_operations method over gRPC.
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + __all__ = ( 'ConfigServiceV2GrpcAsyncIOTransport', diff --git a/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/rest.py b/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/rest.py deleted file mode 100755 index 0a90ea99c0..0000000000 --- a/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/rest.py +++ /dev/null @@ -1,3141 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -from google.auth.transport.requests import AuthorizedSession # type: ignore -import json # type: ignore -import grpc # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.api_core import exceptions as core_exceptions -from google.api_core import retry as retries -from google.api_core import rest_helpers -from google.api_core import rest_streaming -from google.api_core import path_template -from google.api_core import gapic_v1 - -from google.protobuf import json_format -from requests import __version__ as requests_version -import dataclasses -import re -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union -import warnings - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore - - -from google.cloud.logging_v2.types import logging_config -from google.protobuf import empty_pb2 # type: ignore - -from .base import ConfigServiceV2Transport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, - grpc_version=None, - rest_version=requests_version, -) - - -class ConfigServiceV2RestInterceptor: - """Interceptor for ConfigServiceV2. - - Interceptors are used to manipulate requests, request metadata, and responses - in arbitrary ways. - Example use cases include: - * Logging - * Verifying requests according to service or custom semantics - * Stripping extraneous information from responses - - These use cases and more can be enabled by injecting an - instance of a custom subclass when constructing the ConfigServiceV2RestTransport. - - .. 
code-block:: python - class MyCustomConfigServiceV2Interceptor(ConfigServiceV2RestInterceptor): - def pre_create_bucket(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_create_bucket(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_create_exclusion(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_create_exclusion(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_create_sink(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_create_sink(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_create_view(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_create_view(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_delete_bucket(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def pre_delete_exclusion(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def pre_delete_sink(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def pre_delete_view(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def pre_get_bucket(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_bucket(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_get_cmek_settings(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_cmek_settings(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_get_exclusion(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_exclusion(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_get_sink(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_sink(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_get_view(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_view(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_buckets(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_buckets(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_exclusions(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_exclusions(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_sinks(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_sinks(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_views(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_views(self, response): - 
logging.log(f"Received response: {response}") - return response - - def pre_undelete_bucket(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def pre_update_bucket(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_update_bucket(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_update_cmek_settings(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_update_cmek_settings(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_update_exclusion(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_update_exclusion(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_update_sink(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_update_sink(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_update_view(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_update_view(self, response): - logging.log(f"Received response: {response}") - return response - - transport = ConfigServiceV2RestTransport(interceptor=MyCustomConfigServiceV2Interceptor()) - client = ConfigServiceV2Client(transport=transport) - - - """ - def pre_create_bucket(self, request: logging_config.CreateBucketRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[logging_config.CreateBucketRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for create_bucket - - Override in a subclass to manipulate the request or metadata - before they are sent to the ConfigServiceV2 server. - """ - return request, metadata - - def post_create_bucket(self, response: logging_config.LogBucket) -> logging_config.LogBucket: - """Post-rpc interceptor for create_bucket - - Override in a subclass to manipulate the response - after it is returned by the ConfigServiceV2 server but before - it is returned to user code. - """ - return response - def pre_create_exclusion(self, request: logging_config.CreateExclusionRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[logging_config.CreateExclusionRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for create_exclusion - - Override in a subclass to manipulate the request or metadata - before they are sent to the ConfigServiceV2 server. - """ - return request, metadata - - def post_create_exclusion(self, response: logging_config.LogExclusion) -> logging_config.LogExclusion: - """Post-rpc interceptor for create_exclusion - - Override in a subclass to manipulate the response - after it is returned by the ConfigServiceV2 server but before - it is returned to user code. - """ - return response - def pre_create_sink(self, request: logging_config.CreateSinkRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[logging_config.CreateSinkRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for create_sink - - Override in a subclass to manipulate the request or metadata - before they are sent to the ConfigServiceV2 server. 
- """ - return request, metadata - - def post_create_sink(self, response: logging_config.LogSink) -> logging_config.LogSink: - """Post-rpc interceptor for create_sink - - Override in a subclass to manipulate the response - after it is returned by the ConfigServiceV2 server but before - it is returned to user code. - """ - return response - def pre_create_view(self, request: logging_config.CreateViewRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[logging_config.CreateViewRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for create_view - - Override in a subclass to manipulate the request or metadata - before they are sent to the ConfigServiceV2 server. - """ - return request, metadata - - def post_create_view(self, response: logging_config.LogView) -> logging_config.LogView: - """Post-rpc interceptor for create_view - - Override in a subclass to manipulate the response - after it is returned by the ConfigServiceV2 server but before - it is returned to user code. - """ - return response - def pre_delete_bucket(self, request: logging_config.DeleteBucketRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[logging_config.DeleteBucketRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for delete_bucket - - Override in a subclass to manipulate the request or metadata - before they are sent to the ConfigServiceV2 server. - """ - return request, metadata - - def pre_delete_exclusion(self, request: logging_config.DeleteExclusionRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[logging_config.DeleteExclusionRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for delete_exclusion - - Override in a subclass to manipulate the request or metadata - before they are sent to the ConfigServiceV2 server. - """ - return request, metadata - - def pre_delete_sink(self, request: logging_config.DeleteSinkRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[logging_config.DeleteSinkRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for delete_sink - - Override in a subclass to manipulate the request or metadata - before they are sent to the ConfigServiceV2 server. - """ - return request, metadata - - def pre_delete_view(self, request: logging_config.DeleteViewRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[logging_config.DeleteViewRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for delete_view - - Override in a subclass to manipulate the request or metadata - before they are sent to the ConfigServiceV2 server. - """ - return request, metadata - - def pre_get_bucket(self, request: logging_config.GetBucketRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[logging_config.GetBucketRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_bucket - - Override in a subclass to manipulate the request or metadata - before they are sent to the ConfigServiceV2 server. - """ - return request, metadata - - def post_get_bucket(self, response: logging_config.LogBucket) -> logging_config.LogBucket: - """Post-rpc interceptor for get_bucket - - Override in a subclass to manipulate the response - after it is returned by the ConfigServiceV2 server but before - it is returned to user code. 
- """ - return response - def pre_get_cmek_settings(self, request: logging_config.GetCmekSettingsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[logging_config.GetCmekSettingsRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_cmek_settings - - Override in a subclass to manipulate the request or metadata - before they are sent to the ConfigServiceV2 server. - """ - return request, metadata - - def post_get_cmek_settings(self, response: logging_config.CmekSettings) -> logging_config.CmekSettings: - """Post-rpc interceptor for get_cmek_settings - - Override in a subclass to manipulate the response - after it is returned by the ConfigServiceV2 server but before - it is returned to user code. - """ - return response - def pre_get_exclusion(self, request: logging_config.GetExclusionRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[logging_config.GetExclusionRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_exclusion - - Override in a subclass to manipulate the request or metadata - before they are sent to the ConfigServiceV2 server. - """ - return request, metadata - - def post_get_exclusion(self, response: logging_config.LogExclusion) -> logging_config.LogExclusion: - """Post-rpc interceptor for get_exclusion - - Override in a subclass to manipulate the response - after it is returned by the ConfigServiceV2 server but before - it is returned to user code. - """ - return response - def pre_get_sink(self, request: logging_config.GetSinkRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[logging_config.GetSinkRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_sink - - Override in a subclass to manipulate the request or metadata - before they are sent to the ConfigServiceV2 server. - """ - return request, metadata - - def post_get_sink(self, response: logging_config.LogSink) -> logging_config.LogSink: - """Post-rpc interceptor for get_sink - - Override in a subclass to manipulate the response - after it is returned by the ConfigServiceV2 server but before - it is returned to user code. - """ - return response - def pre_get_view(self, request: logging_config.GetViewRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[logging_config.GetViewRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_view - - Override in a subclass to manipulate the request or metadata - before they are sent to the ConfigServiceV2 server. - """ - return request, metadata - - def post_get_view(self, response: logging_config.LogView) -> logging_config.LogView: - """Post-rpc interceptor for get_view - - Override in a subclass to manipulate the response - after it is returned by the ConfigServiceV2 server but before - it is returned to user code. - """ - return response - def pre_list_buckets(self, request: logging_config.ListBucketsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[logging_config.ListBucketsRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for list_buckets - - Override in a subclass to manipulate the request or metadata - before they are sent to the ConfigServiceV2 server. - """ - return request, metadata - - def post_list_buckets(self, response: logging_config.ListBucketsResponse) -> logging_config.ListBucketsResponse: - """Post-rpc interceptor for list_buckets - - Override in a subclass to manipulate the response - after it is returned by the ConfigServiceV2 server but before - it is returned to user code. 
- """ - return response - def pre_list_exclusions(self, request: logging_config.ListExclusionsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[logging_config.ListExclusionsRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for list_exclusions - - Override in a subclass to manipulate the request or metadata - before they are sent to the ConfigServiceV2 server. - """ - return request, metadata - - def post_list_exclusions(self, response: logging_config.ListExclusionsResponse) -> logging_config.ListExclusionsResponse: - """Post-rpc interceptor for list_exclusions - - Override in a subclass to manipulate the response - after it is returned by the ConfigServiceV2 server but before - it is returned to user code. - """ - return response - def pre_list_sinks(self, request: logging_config.ListSinksRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[logging_config.ListSinksRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for list_sinks - - Override in a subclass to manipulate the request or metadata - before they are sent to the ConfigServiceV2 server. - """ - return request, metadata - - def post_list_sinks(self, response: logging_config.ListSinksResponse) -> logging_config.ListSinksResponse: - """Post-rpc interceptor for list_sinks - - Override in a subclass to manipulate the response - after it is returned by the ConfigServiceV2 server but before - it is returned to user code. - """ - return response - def pre_list_views(self, request: logging_config.ListViewsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[logging_config.ListViewsRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for list_views - - Override in a subclass to manipulate the request or metadata - before they are sent to the ConfigServiceV2 server. - """ - return request, metadata - - def post_list_views(self, response: logging_config.ListViewsResponse) -> logging_config.ListViewsResponse: - """Post-rpc interceptor for list_views - - Override in a subclass to manipulate the response - after it is returned by the ConfigServiceV2 server but before - it is returned to user code. - """ - return response - def pre_undelete_bucket(self, request: logging_config.UndeleteBucketRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[logging_config.UndeleteBucketRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for undelete_bucket - - Override in a subclass to manipulate the request or metadata - before they are sent to the ConfigServiceV2 server. - """ - return request, metadata - - def pre_update_bucket(self, request: logging_config.UpdateBucketRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[logging_config.UpdateBucketRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for update_bucket - - Override in a subclass to manipulate the request or metadata - before they are sent to the ConfigServiceV2 server. - """ - return request, metadata - - def post_update_bucket(self, response: logging_config.LogBucket) -> logging_config.LogBucket: - """Post-rpc interceptor for update_bucket - - Override in a subclass to manipulate the response - after it is returned by the ConfigServiceV2 server but before - it is returned to user code. 
- """ - return response - def pre_update_cmek_settings(self, request: logging_config.UpdateCmekSettingsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[logging_config.UpdateCmekSettingsRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for update_cmek_settings - - Override in a subclass to manipulate the request or metadata - before they are sent to the ConfigServiceV2 server. - """ - return request, metadata - - def post_update_cmek_settings(self, response: logging_config.CmekSettings) -> logging_config.CmekSettings: - """Post-rpc interceptor for update_cmek_settings - - Override in a subclass to manipulate the response - after it is returned by the ConfigServiceV2 server but before - it is returned to user code. - """ - return response - def pre_update_exclusion(self, request: logging_config.UpdateExclusionRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[logging_config.UpdateExclusionRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for update_exclusion - - Override in a subclass to manipulate the request or metadata - before they are sent to the ConfigServiceV2 server. - """ - return request, metadata - - def post_update_exclusion(self, response: logging_config.LogExclusion) -> logging_config.LogExclusion: - """Post-rpc interceptor for update_exclusion - - Override in a subclass to manipulate the response - after it is returned by the ConfigServiceV2 server but before - it is returned to user code. - """ - return response - def pre_update_sink(self, request: logging_config.UpdateSinkRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[logging_config.UpdateSinkRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for update_sink - - Override in a subclass to manipulate the request or metadata - before they are sent to the ConfigServiceV2 server. - """ - return request, metadata - - def post_update_sink(self, response: logging_config.LogSink) -> logging_config.LogSink: - """Post-rpc interceptor for update_sink - - Override in a subclass to manipulate the response - after it is returned by the ConfigServiceV2 server but before - it is returned to user code. - """ - return response - def pre_update_view(self, request: logging_config.UpdateViewRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[logging_config.UpdateViewRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for update_view - - Override in a subclass to manipulate the request or metadata - before they are sent to the ConfigServiceV2 server. - """ - return request, metadata - - def post_update_view(self, response: logging_config.LogView) -> logging_config.LogView: - """Post-rpc interceptor for update_view - - Override in a subclass to manipulate the response - after it is returned by the ConfigServiceV2 server but before - it is returned to user code. - """ - return response - - -@dataclasses.dataclass -class ConfigServiceV2RestStub: - _session: AuthorizedSession - _host: str - _interceptor: ConfigServiceV2RestInterceptor - - -class ConfigServiceV2RestTransport(ConfigServiceV2Transport): - """REST backend transport for ConfigServiceV2. - - Service for configuring sinks used to route log entries. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends JSON representations of protocol buffers over HTTP/1.1 - - NOTE: This REST transport functionality is currently in a beta - state (preview). We welcome your feedback via an issue in this - library's source repository. 
Thank you! - """ - - def __init__(self, *, - host: str = 'logging.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - client_cert_source_for_mtls: Optional[Callable[[ - ], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - interceptor: Optional[ConfigServiceV2RestInterceptor] = None, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - NOTE: This REST transport functionality is currently in a beta - state (preview). We welcome your feedback via a GitHub issue in - this library's repository. Thank you! - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client - certificate to configure mutual TLS HTTP channel. It is ignored - if ``channel`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. - """ - # Run the base constructor - # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
- # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the - # credentials object - maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) - if maybe_url_match is None: - raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER - - url_match_items = maybe_url_match.groupdict() - - host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host - - super().__init__( - host=host, - credentials=credentials, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience - ) - self._session = AuthorizedSession( - self._credentials, default_host=self.DEFAULT_HOST) - if client_cert_source_for_mtls: - self._session.configure_mtls_channel(client_cert_source_for_mtls) - self._interceptor = interceptor or ConfigServiceV2RestInterceptor() - self._prep_wrapped_messages(client_info) - - class _CreateBucket(ConfigServiceV2RestStub): - def __hash__(self): - return hash("CreateBucket") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "bucketId" : "", } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: logging_config.CreateBucketRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> logging_config.LogBucket: - r"""Call the create bucket method over HTTP. - - Args: - request (~.logging_config.CreateBucketRequest): - The request object. The parameters to ``CreateBucket``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.logging_config.LogBucket: - Describes a repository of logs.
- """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v2/{parent=*/*/locations/*}/buckets', - 'body': 'bucket', - }, -{ - 'method': 'post', - 'uri': '/v2/{parent=projects/*/locations/*}/buckets', - 'body': 'bucket', - }, -{ - 'method': 'post', - 'uri': '/v2/{parent=organizations/*/locations/*}/buckets', - 'body': 'bucket', - }, -{ - 'method': 'post', - 'uri': '/v2/{parent=folders/*/locations/*}/buckets', - 'body': 'bucket', - }, -{ - 'method': 'post', - 'uri': '/v2/{parent=billingAccounts/*/locations/*}/buckets', - 'body': 'bucket', - }, - ] - request, metadata = self._interceptor.pre_create_bucket(request, metadata) - pb_request = logging_config.CreateBucketRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=False - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=False, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = logging_config.LogBucket() - pb_resp = logging_config.LogBucket.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_create_bucket(resp) - return resp - - class _CreateExclusion(ConfigServiceV2RestStub): - def __hash__(self): - return hash("CreateExclusion") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: logging_config.CreateExclusionRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> logging_config.LogExclusion: - r"""Call the create exclusion method over HTTP. - - Args: - request (~.logging_config.CreateExclusionRequest): - The request object. The parameters to ``CreateExclusion``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.logging_config.LogExclusion: - Specifies a set of log entries that - are not to be stored in Logging. If your - GCP resource receives a large volume of - logs, you can use exclusions to reduce - your chargeable logs. Exclusions are - processed after log sinks, so you can - export log entries before they are - excluded. Note that organization-level - and folder-level exclusions don't apply - to child resources, and that you can't - exclude audit log entries. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v2/{parent=*/*}/exclusions', - 'body': 'exclusion', - }, -{ - 'method': 'post', - 'uri': '/v2/{parent=projects/*}/exclusions', - 'body': 'exclusion', - }, -{ - 'method': 'post', - 'uri': '/v2/{parent=organizations/*}/exclusions', - 'body': 'exclusion', - }, -{ - 'method': 'post', - 'uri': '/v2/{parent=folders/*}/exclusions', - 'body': 'exclusion', - }, -{ - 'method': 'post', - 'uri': '/v2/{parent=billingAccounts/*}/exclusions', - 'body': 'exclusion', - }, - ] - request, metadata = self._interceptor.pre_create_exclusion(request, metadata) - pb_request = logging_config.CreateExclusionRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=False - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=False, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = logging_config.LogExclusion() - pb_resp = logging_config.LogExclusion.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_create_exclusion(resp) - return resp - - class _CreateSink(ConfigServiceV2RestStub): - def __hash__(self): - return hash("CreateSink") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: logging_config.CreateSinkRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> logging_config.LogSink: - r"""Call the create sink method over HTTP. - - Args: - request (~.logging_config.CreateSinkRequest): - The request object. The parameters to ``CreateSink``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.logging_config.LogSink: - Describes a sink used to export log - entries to one of the following - destinations in any project: a Cloud - Storage bucket, a BigQuery dataset, or a - Cloud Pub/Sub topic. A logs filter - controls which log entries are exported. - The sink must be created within a - project, organization, billing account, - or folder. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v2/{parent=*/*}/sinks', - 'body': 'sink', - }, -{ - 'method': 'post', - 'uri': '/v2/{parent=projects/*}/sinks', - 'body': 'sink', - }, -{ - 'method': 'post', - 'uri': '/v2/{parent=organizations/*}/sinks', - 'body': 'sink', - }, -{ - 'method': 'post', - 'uri': '/v2/{parent=folders/*}/sinks', - 'body': 'sink', - }, -{ - 'method': 'post', - 'uri': '/v2/{parent=billingAccounts/*}/sinks', - 'body': 'sink', - }, - ] - request, metadata = self._interceptor.pre_create_sink(request, metadata) - pb_request = logging_config.CreateSinkRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=False - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=False, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = logging_config.LogSink() - pb_resp = logging_config.LogSink.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_create_sink(resp) - return resp - - class _CreateView(ConfigServiceV2RestStub): - def __hash__(self): - return hash("CreateView") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "viewId" : "", } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: logging_config.CreateViewRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> logging_config.LogView: - r"""Call the create view method over HTTP. - - Args: - request (~.logging_config.CreateViewRequest): - The request object. The parameters to ``CreateView``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.logging_config.LogView: - Describes a view over logs in a - bucket. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v2/{parent=*/*/locations/*/buckets/*}/views', - 'body': 'view', - }, -{ - 'method': 'post', - 'uri': '/v2/{parent=projects/*/locations/*/buckets/*}/views', - 'body': 'view', - }, -{ - 'method': 'post', - 'uri': '/v2/{parent=organizations/*/locations/*/buckets/*}/views', - 'body': 'view', - }, -{ - 'method': 'post', - 'uri': '/v2/{parent=folders/*/locations/*/buckets/*}/views', - 'body': 'view', - }, -{ - 'method': 'post', - 'uri': '/v2/{parent=billingAccounts/*/locations/*/buckets/*}/views', - 'body': 'view', - }, - ] - request, metadata = self._interceptor.pre_create_view(request, metadata) - pb_request = logging_config.CreateViewRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=False - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=False, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = logging_config.LogView() - pb_resp = logging_config.LogView.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_create_view(resp) - return resp - - class _DeleteBucket(ConfigServiceV2RestStub): - def __hash__(self): - return hash("DeleteBucket") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: logging_config.DeleteBucketRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ): - r"""Call the delete bucket method over HTTP. - - Args: - request (~.logging_config.DeleteBucketRequest): - The request object. The parameters to ``DeleteBucket``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v2/{name=*/*/locations/*/buckets/*}', - }, -{ - 'method': 'delete', - 'uri': '/v2/{name=projects/*/locations/*/buckets/*}', - }, -{ - 'method': 'delete', - 'uri': '/v2/{name=organizations/*/locations/*/buckets/*}', - }, -{ - 'method': 'delete', - 'uri': '/v2/{name=folders/*/locations/*/buckets/*}', - }, -{ - 'method': 'delete', - 'uri': '/v2/{name=billingAccounts/*/locations/*/buckets/*}', - }, - ] - request, metadata = self._interceptor.pre_delete_bucket(request, metadata) - pb_request = logging_config.DeleteBucketRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=False, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - class _DeleteExclusion(ConfigServiceV2RestStub): - def __hash__(self): - return hash("DeleteExclusion") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: logging_config.DeleteExclusionRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ): - r"""Call the delete exclusion method over HTTP. - - Args: - request (~.logging_config.DeleteExclusionRequest): - The request object. The parameters to ``DeleteExclusion``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v2/{name=*/*/exclusions/*}', - }, -{ - 'method': 'delete', - 'uri': '/v2/{name=projects/*/exclusions/*}', - }, -{ - 'method': 'delete', - 'uri': '/v2/{name=organizations/*/exclusions/*}', - }, -{ - 'method': 'delete', - 'uri': '/v2/{name=folders/*/exclusions/*}', - }, -{ - 'method': 'delete', - 'uri': '/v2/{name=billingAccounts/*/exclusions/*}', - }, - ] - request, metadata = self._interceptor.pre_delete_exclusion(request, metadata) - pb_request = logging_config.DeleteExclusionRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=False, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - class _DeleteSink(ConfigServiceV2RestStub): - def __hash__(self): - return hash("DeleteSink") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: logging_config.DeleteSinkRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ): - r"""Call the delete sink method over HTTP. - - Args: - request (~.logging_config.DeleteSinkRequest): - The request object. The parameters to ``DeleteSink``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v2/{sink_name=*/*/sinks/*}', - }, -{ - 'method': 'delete', - 'uri': '/v2/{sink_name=projects/*/sinks/*}', - }, -{ - 'method': 'delete', - 'uri': '/v2/{sink_name=organizations/*/sinks/*}', - }, -{ - 'method': 'delete', - 'uri': '/v2/{sink_name=folders/*/sinks/*}', - }, -{ - 'method': 'delete', - 'uri': '/v2/{sink_name=billingAccounts/*/sinks/*}', - }, - ] - request, metadata = self._interceptor.pre_delete_sink(request, metadata) - pb_request = logging_config.DeleteSinkRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=False, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - class _DeleteView(ConfigServiceV2RestStub): - def __hash__(self): - return hash("DeleteView") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: logging_config.DeleteViewRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ): - r"""Call the delete view method over HTTP. - - Args: - request (~.logging_config.DeleteViewRequest): - The request object. The parameters to ``DeleteView``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v2/{name=*/*/locations/*/buckets/*/views/*}', - }, -{ - 'method': 'delete', - 'uri': '/v2/{name=projects/*/locations/*/buckets/*/views/*}', - }, -{ - 'method': 'delete', - 'uri': '/v2/{name=organizations/*/locations/*/buckets/*/views/*}', - }, -{ - 'method': 'delete', - 'uri': '/v2/{name=folders/*/locations/*/buckets/*/views/*}', - }, -{ - 'method': 'delete', - 'uri': '/v2/{name=billingAccounts/*/locations/*/buckets/*/views/*}', - }, - ] - request, metadata = self._interceptor.pre_delete_view(request, metadata) - pb_request = logging_config.DeleteViewRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=False, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - class _GetBucket(ConfigServiceV2RestStub): - def __hash__(self): - return hash("GetBucket") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: logging_config.GetBucketRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> logging_config.LogBucket: - r"""Call the get bucket method over HTTP. - - Args: - request (~.logging_config.GetBucketRequest): - The request object. The parameters to ``GetBucket``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.logging_config.LogBucket: - Describes a repository of logs. 
- """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v2/{name=*/*/locations/*/buckets/*}', - }, -{ - 'method': 'get', - 'uri': '/v2/{name=projects/*/locations/*/buckets/*}', - }, -{ - 'method': 'get', - 'uri': '/v2/{name=organizations/*/locations/*/buckets/*}', - }, -{ - 'method': 'get', - 'uri': '/v2/{name=folders/*/locations/*/buckets/*}', - }, -{ - 'method': 'get', - 'uri': '/v2/{name=billingAccounts/*/buckets/*}', - }, - ] - request, metadata = self._interceptor.pre_get_bucket(request, metadata) - pb_request = logging_config.GetBucketRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=False, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = logging_config.LogBucket() - pb_resp = logging_config.LogBucket.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_bucket(resp) - return resp - - class _GetCmekSettings(ConfigServiceV2RestStub): - def __hash__(self): - return hash("GetCmekSettings") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: logging_config.GetCmekSettingsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> logging_config.CmekSettings: - r"""Call the get cmek settings method over HTTP. - - Args: - request (~.logging_config.GetCmekSettingsRequest): - The request object. The parameters to - [GetCmekSettings][google.logging.v2.ConfigServiceV2.GetCmekSettings]. - - See `Enabling CMEK for Logs - Router `__ - for more information. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.logging_config.CmekSettings: - Describes the customer-managed encryption key (CMEK) - settings associated with a project, folder, - organization, billing account, or flexible resource. - - Note: CMEK for the Logs Router can currently only be - configured for GCP organizations. Once configured, it - applies to all projects and folders in the GCP - organization. - - See `Enabling CMEK for Logs - Router `__ - for more information. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v2/{name=*/*}/cmekSettings', - }, -{ - 'method': 'get', - 'uri': '/v2/{name=organizations/*}/cmekSettings', - }, - ] - request, metadata = self._interceptor.pre_get_cmek_settings(request, metadata) - pb_request = logging_config.GetCmekSettingsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=False, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = logging_config.CmekSettings() - pb_resp = logging_config.CmekSettings.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_cmek_settings(resp) - return resp - - class _GetExclusion(ConfigServiceV2RestStub): - def __hash__(self): - return hash("GetExclusion") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: logging_config.GetExclusionRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> logging_config.LogExclusion: - r"""Call the get exclusion method over HTTP. - - Args: - request (~.logging_config.GetExclusionRequest): - The request object. The parameters to ``GetExclusion``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.logging_config.LogExclusion: - Specifies a set of log entries that - are not to be stored in Logging. If your - GCP resource receives a large volume of - logs, you can use exclusions to reduce - your chargeable logs. Exclusions are - processed after log sinks, so you can - export log entries before they are - excluded. Note that organization-level - and folder-level exclusions don't apply - to child resources, and that you can't - exclude audit log entries. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v2/{name=*/*/exclusions/*}', - }, -{ - 'method': 'get', - 'uri': '/v2/{name=projects/*/exclusions/*}', - }, -{ - 'method': 'get', - 'uri': '/v2/{name=organizations/*/exclusions/*}', - }, -{ - 'method': 'get', - 'uri': '/v2/{name=folders/*/exclusions/*}', - }, -{ - 'method': 'get', - 'uri': '/v2/{name=billingAccounts/*/exclusions/*}', - }, - ] - request, metadata = self._interceptor.pre_get_exclusion(request, metadata) - pb_request = logging_config.GetExclusionRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=False, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = logging_config.LogExclusion() - pb_resp = logging_config.LogExclusion.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_exclusion(resp) - return resp - - class _GetSink(ConfigServiceV2RestStub): - def __hash__(self): - return hash("GetSink") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: logging_config.GetSinkRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> logging_config.LogSink: - r"""Call the get sink method over HTTP. - - Args: - request (~.logging_config.GetSinkRequest): - The request object. The parameters to ``GetSink``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.logging_config.LogSink: - Describes a sink used to export log - entries to one of the following - destinations in any project: a Cloud - Storage bucket, a BigQuery dataset, or a - Cloud Pub/Sub topic. A logs filter - controls which log entries are exported. - The sink must be created within a - project, organization, billing account, - or folder. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v2/{sink_name=*/*/sinks/*}', - }, -{ - 'method': 'get', - 'uri': '/v2/{sink_name=projects/*/sinks/*}', - }, -{ - 'method': 'get', - 'uri': '/v2/{sink_name=organizations/*/sinks/*}', - }, -{ - 'method': 'get', - 'uri': '/v2/{sink_name=folders/*/sinks/*}', - }, -{ - 'method': 'get', - 'uri': '/v2/{sink_name=billingAccounts/*/sinks/*}', - }, - ] - request, metadata = self._interceptor.pre_get_sink(request, metadata) - pb_request = logging_config.GetSinkRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=False, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = logging_config.LogSink() - pb_resp = logging_config.LogSink.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_sink(resp) - return resp - - class _GetView(ConfigServiceV2RestStub): - def __hash__(self): - return hash("GetView") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: logging_config.GetViewRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> logging_config.LogView: - r"""Call the get view method over HTTP. - - Args: - request (~.logging_config.GetViewRequest): - The request object. The parameters to ``GetView``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.logging_config.LogView: - Describes a view over logs in a - bucket. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v2/{name=*/*/locations/*/buckets/*/views/*}', - }, -{ - 'method': 'get', - 'uri': '/v2/{name=projects/*/locations/*/buckets/*/views/*}', - }, -{ - 'method': 'get', - 'uri': '/v2/{name=organizations/*/locations/*/buckets/*/views/*}', - }, -{ - 'method': 'get', - 'uri': '/v2/{name=folders/*/locations/*/buckets/*/views/*}', - }, -{ - 'method': 'get', - 'uri': '/v2/{name=billingAccounts/*/buckets/*/views/*}', - }, - ] - request, metadata = self._interceptor.pre_get_view(request, metadata) - pb_request = logging_config.GetViewRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=False, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = logging_config.LogView() - pb_resp = logging_config.LogView.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_view(resp) - return resp - - class _ListBuckets(ConfigServiceV2RestStub): - def __hash__(self): - return hash("ListBuckets") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: logging_config.ListBucketsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> logging_config.ListBucketsResponse: - r"""Call the list buckets method over HTTP. - - Args: - request (~.logging_config.ListBucketsRequest): - The request object. The parameters to ``ListBuckets``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.logging_config.ListBucketsResponse: - The response from ListBuckets. 
- """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v2/{parent=*/*/locations/*}/buckets', - }, -{ - 'method': 'get', - 'uri': '/v2/{parent=projects/*/locations/*}/buckets', - }, -{ - 'method': 'get', - 'uri': '/v2/{parent=organizations/*/locations/*}/buckets', - }, -{ - 'method': 'get', - 'uri': '/v2/{parent=folders/*/locations/*}/buckets', - }, -{ - 'method': 'get', - 'uri': '/v2/{parent=billingAccounts/*/locations/*}/buckets', - }, - ] - request, metadata = self._interceptor.pre_list_buckets(request, metadata) - pb_request = logging_config.ListBucketsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=False, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = logging_config.ListBucketsResponse() - pb_resp = logging_config.ListBucketsResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_buckets(resp) - return resp - - class _ListExclusions(ConfigServiceV2RestStub): - def __hash__(self): - return hash("ListExclusions") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: logging_config.ListExclusionsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> logging_config.ListExclusionsResponse: - r"""Call the list exclusions method over HTTP. - - Args: - request (~.logging_config.ListExclusionsRequest): - The request object. The parameters to ``ListExclusions``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.logging_config.ListExclusionsResponse: - Result returned from ``ListExclusions``. 
- """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v2/{parent=*/*}/exclusions', - }, -{ - 'method': 'get', - 'uri': '/v2/{parent=projects/*}/exclusions', - }, -{ - 'method': 'get', - 'uri': '/v2/{parent=organizations/*}/exclusions', - }, -{ - 'method': 'get', - 'uri': '/v2/{parent=folders/*}/exclusions', - }, -{ - 'method': 'get', - 'uri': '/v2/{parent=billingAccounts/*}/exclusions', - }, - ] - request, metadata = self._interceptor.pre_list_exclusions(request, metadata) - pb_request = logging_config.ListExclusionsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=False, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = logging_config.ListExclusionsResponse() - pb_resp = logging_config.ListExclusionsResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_exclusions(resp) - return resp - - class _ListSinks(ConfigServiceV2RestStub): - def __hash__(self): - return hash("ListSinks") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: logging_config.ListSinksRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> logging_config.ListSinksResponse: - r"""Call the list sinks method over HTTP. - - Args: - request (~.logging_config.ListSinksRequest): - The request object. The parameters to ``ListSinks``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.logging_config.ListSinksResponse: - Result returned from ``ListSinks``. 
- """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v2/{parent=*/*}/sinks', - }, -{ - 'method': 'get', - 'uri': '/v2/{parent=projects/*}/sinks', - }, -{ - 'method': 'get', - 'uri': '/v2/{parent=organizations/*}/sinks', - }, -{ - 'method': 'get', - 'uri': '/v2/{parent=folders/*}/sinks', - }, -{ - 'method': 'get', - 'uri': '/v2/{parent=billingAccounts/*}/sinks', - }, - ] - request, metadata = self._interceptor.pre_list_sinks(request, metadata) - pb_request = logging_config.ListSinksRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=False, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = logging_config.ListSinksResponse() - pb_resp = logging_config.ListSinksResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_sinks(resp) - return resp - - class _ListViews(ConfigServiceV2RestStub): - def __hash__(self): - return hash("ListViews") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: logging_config.ListViewsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> logging_config.ListViewsResponse: - r"""Call the list views method over HTTP. - - Args: - request (~.logging_config.ListViewsRequest): - The request object. The parameters to ``ListViews``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.logging_config.ListViewsResponse: - The response from ListViews. 
- """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v2/{parent=*/*/locations/*/buckets/*}/views', - }, -{ - 'method': 'get', - 'uri': '/v2/{parent=projects/*/locations/*/buckets/*}/views', - }, -{ - 'method': 'get', - 'uri': '/v2/{parent=organizations/*/locations/*/buckets/*}/views', - }, -{ - 'method': 'get', - 'uri': '/v2/{parent=folders/*/locations/*/buckets/*}/views', - }, -{ - 'method': 'get', - 'uri': '/v2/{parent=billingAccounts/*/locations/*/buckets/*}/views', - }, - ] - request, metadata = self._interceptor.pre_list_views(request, metadata) - pb_request = logging_config.ListViewsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=False, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = logging_config.ListViewsResponse() - pb_resp = logging_config.ListViewsResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_views(resp) - return resp - - class _UndeleteBucket(ConfigServiceV2RestStub): - def __hash__(self): - return hash("UndeleteBucket") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: logging_config.UndeleteBucketRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ): - r"""Call the undelete bucket method over HTTP. - - Args: - request (~.logging_config.UndeleteBucketRequest): - The request object. The parameters to ``UndeleteBucket``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v2/{name=*/*/locations/*/buckets/*}:undelete', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v2/{name=projects/*/locations/*/buckets/*}:undelete', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v2/{name=organizations/*/locations/*/buckets/*}:undelete', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v2/{name=folders/*/locations/*/buckets/*}:undelete', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v2/{name=billingAccounts/*/locations/*/buckets/*}:undelete', - 'body': '*', - }, - ] - request, metadata = self._interceptor.pre_undelete_bucket(request, metadata) - pb_request = logging_config.UndeleteBucketRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=False - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=False, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - class _UpdateBucket(ConfigServiceV2RestStub): - def __hash__(self): - return hash("UpdateBucket") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "updateMask" : {}, } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: logging_config.UpdateBucketRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> logging_config.LogBucket: - r"""Call the update bucket method over HTTP. - - Args: - request (~.logging_config.UpdateBucketRequest): - The request object. The parameters to ``UpdateBucket``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.logging_config.LogBucket: - Describes a repository of logs. 
- """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'patch', - 'uri': '/v2/{name=*/*/locations/*/buckets/*}', - 'body': 'bucket', - }, -{ - 'method': 'patch', - 'uri': '/v2/{name=projects/*/locations/*/buckets/*}', - 'body': 'bucket', - }, -{ - 'method': 'patch', - 'uri': '/v2/{name=organizations/*/locations/*/buckets/*}', - 'body': 'bucket', - }, -{ - 'method': 'patch', - 'uri': '/v2/{name=folders/*/locations/*/buckets/*}', - 'body': 'bucket', - }, -{ - 'method': 'patch', - 'uri': '/v2/{name=billingAccounts/*/locations/*/buckets/*}', - 'body': 'bucket', - }, - ] - request, metadata = self._interceptor.pre_update_bucket(request, metadata) - pb_request = logging_config.UpdateBucketRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=False - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=False, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = logging_config.LogBucket() - pb_resp = logging_config.LogBucket.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_update_bucket(resp) - return resp - - class _UpdateCmekSettings(ConfigServiceV2RestStub): - def __hash__(self): - return hash("UpdateCmekSettings") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: logging_config.UpdateCmekSettingsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> logging_config.CmekSettings: - r"""Call the update cmek settings method over HTTP. - - Args: - request (~.logging_config.UpdateCmekSettingsRequest): - The request object. The parameters to - [UpdateCmekSettings][google.logging.v2.ConfigServiceV2.UpdateCmekSettings]. - - See `Enabling CMEK for Logs - Router `__ - for more information. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.logging_config.CmekSettings: - Describes the customer-managed encryption key (CMEK) - settings associated with a project, folder, - organization, billing account, or flexible resource. - - Note: CMEK for the Logs Router can currently only be - configured for GCP organizations. 
Once configured, it -                applies to all projects and folders in the GCP -                organization. -  -                See `Enabling CMEK for Logs -                Router <https://cloud.google.com/logging/docs/routing/managed-encryption>`__ -                for more information. -  -            """ -  -        http_options: List[Dict[str, str]] = [{ -            'method': 'patch', -            'uri': '/v2/{name=*/*}/cmekSettings', -            'body': 'cmek_settings', -        }, -{ -            'method': 'patch', -            'uri': '/v2/{name=organizations/*}/cmekSettings', -            'body': 'cmek_settings', -        }, -        ] -        request, metadata = self._interceptor.pre_update_cmek_settings(request, metadata) -        pb_request = logging_config.UpdateCmekSettingsRequest.pb(request) -        transcoded_request = path_template.transcode(http_options, pb_request) -  -        # Jsonify the request body -  -        body = json_format.MessageToJson( -            transcoded_request['body'], -            including_default_value_fields=False, -            use_integers_for_enums=False -        ) -        uri = transcoded_request['uri'] -        method = transcoded_request['method'] -  -        # Jsonify the query params -        query_params = json.loads(json_format.MessageToJson( -            transcoded_request['query_params'], -            including_default_value_fields=False, -            use_integers_for_enums=False, -        )) -        query_params.update(self._get_unset_required_fields(query_params)) -  -        # Send the request -        headers = dict(metadata) -        headers['Content-Type'] = 'application/json' -        response = getattr(self._session, method)( -            "{host}{uri}".format(host=self._host, uri=uri), -            timeout=timeout, -            headers=headers, -            params=rest_helpers.flatten_query_params(query_params, strict=True), -            data=body, -            ) -  -        # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception -        # subclass. -        if response.status_code >= 400: -            raise core_exceptions.from_http_response(response) -  -        # Return the response -        resp = logging_config.CmekSettings() -        pb_resp = logging_config.CmekSettings.pb(resp) -  -        json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) -        resp = self._interceptor.post_update_cmek_settings(resp) -        return resp -  -    class _UpdateExclusion(ConfigServiceV2RestStub): -        def __hash__(self): -            return hash("UpdateExclusion") -  -        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] =  { -            "updateMask" : {},        } -  -        @classmethod -        def _get_unset_required_fields(cls, message_dict): -            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} -  -        def __call__(self, -                request: logging_config.UpdateExclusionRequest, *, -                retry: OptionalRetry=gapic_v1.method.DEFAULT, -                timeout: Optional[float]=None, -                metadata: Sequence[Tuple[str, str]]=(), -                ) -> logging_config.LogExclusion: -            r"""Call the update exclusion method over HTTP. -  -            Args: -                request (~.logging_config.UpdateExclusionRequest): -                    The request object. The parameters to ``UpdateExclusion``. -                retry (google.api_core.retry.Retry): Designation of what errors, if any, -                    should be retried. -                timeout (float): The timeout for this request. -                metadata (Sequence[Tuple[str, str]]): Strings which should be -                    sent along with the request as metadata. -  -            Returns: -                ~.logging_config.LogExclusion: -                    Specifies a set of log entries that -                are not to be stored in Logging. If your -                GCP resource receives a large volume of -                logs, you can use exclusions to reduce -                your chargeable logs. Exclusions are -                processed after log sinks, so you can -                export log entries before they are -                excluded. Note that organization-level -                and folder-level exclusions don't apply -                to child resources, and that you can't -                exclude audit log entries. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'patch', - 'uri': '/v2/{name=*/*/exclusions/*}', - 'body': 'exclusion', - }, -{ - 'method': 'patch', - 'uri': '/v2/{name=projects/*/exclusions/*}', - 'body': 'exclusion', - }, -{ - 'method': 'patch', - 'uri': '/v2/{name=organizations/*/exclusions/*}', - 'body': 'exclusion', - }, -{ - 'method': 'patch', - 'uri': '/v2/{name=folders/*/exclusions/*}', - 'body': 'exclusion', - }, -{ - 'method': 'patch', - 'uri': '/v2/{name=billingAccounts/*/exclusions/*}', - 'body': 'exclusion', - }, - ] - request, metadata = self._interceptor.pre_update_exclusion(request, metadata) - pb_request = logging_config.UpdateExclusionRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=False - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=False, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = logging_config.LogExclusion() - pb_resp = logging_config.LogExclusion.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_update_exclusion(resp) - return resp - - class _UpdateSink(ConfigServiceV2RestStub): - def __hash__(self): - return hash("UpdateSink") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: logging_config.UpdateSinkRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> logging_config.LogSink: - r"""Call the update sink method over HTTP. - - Args: - request (~.logging_config.UpdateSinkRequest): - The request object. The parameters to ``UpdateSink``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.logging_config.LogSink: - Describes a sink used to export log - entries to one of the following - destinations in any project: a Cloud - Storage bucket, a BigQuery dataset, or a - Cloud Pub/Sub topic. A logs filter - controls which log entries are exported. - The sink must be created within a - project, organization, billing account, - or folder. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'put', - 'uri': '/v2/{sink_name=*/*/sinks/*}', - 'body': 'sink', - }, -{ - 'method': 'put', - 'uri': '/v2/{sink_name=projects/*/sinks/*}', - 'body': 'sink', - }, -{ - 'method': 'put', - 'uri': '/v2/{sink_name=organizations/*/sinks/*}', - 'body': 'sink', - }, -{ - 'method': 'put', - 'uri': '/v2/{sink_name=folders/*/sinks/*}', - 'body': 'sink', - }, -{ - 'method': 'put', - 'uri': '/v2/{sink_name=billingAccounts/*/sinks/*}', - 'body': 'sink', - }, -{ - 'method': 'patch', - 'uri': '/v2/{sink_name=projects/*/sinks/*}', - 'body': 'sink', - }, -{ - 'method': 'patch', - 'uri': '/v2/{sink_name=organizations/*/sinks/*}', - 'body': 'sink', - }, -{ - 'method': 'patch', - 'uri': '/v2/{sink_name=folders/*/sinks/*}', - 'body': 'sink', - }, -{ - 'method': 'patch', - 'uri': '/v2/{sink_name=billingAccounts/*/sinks/*}', - 'body': 'sink', - }, - ] - request, metadata = self._interceptor.pre_update_sink(request, metadata) - pb_request = logging_config.UpdateSinkRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=False - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=False, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = logging_config.LogSink() - pb_resp = logging_config.LogSink.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_update_sink(resp) - return resp - - class _UpdateView(ConfigServiceV2RestStub): - def __hash__(self): - return hash("UpdateView") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: logging_config.UpdateViewRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> logging_config.LogView: - r"""Call the update view method over HTTP. - - Args: - request (~.logging_config.UpdateViewRequest): - The request object. The parameters to ``UpdateView``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.logging_config.LogView: - Describes a view over logs in a - bucket. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'patch', - 'uri': '/v2/{name=*/*/locations/*/buckets/*/views/*}', - 'body': 'view', - }, -{ - 'method': 'patch', - 'uri': '/v2/{name=projects/*/locations/*/buckets/*/views/*}', - 'body': 'view', - }, -{ - 'method': 'patch', - 'uri': '/v2/{name=organizations/*/locations/*/buckets/*/views/*}', - 'body': 'view', - }, -{ - 'method': 'patch', - 'uri': '/v2/{name=folders/*/locations/*/buckets/*/views/*}', - 'body': 'view', - }, -{ - 'method': 'patch', - 'uri': '/v2/{name=billingAccounts/*/locations/*/buckets/*/views/*}', - 'body': 'view', - }, - ] - request, metadata = self._interceptor.pre_update_view(request, metadata) - pb_request = logging_config.UpdateViewRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=False - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=False, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = logging_config.LogView() - pb_resp = logging_config.LogView.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_update_view(resp) - return resp - - @property - def create_bucket(self) -> Callable[ - [logging_config.CreateBucketRequest], - logging_config.LogBucket]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CreateBucket(self._session, self._host, self._interceptor) # type: ignore - - @property - def create_exclusion(self) -> Callable[ - [logging_config.CreateExclusionRequest], - logging_config.LogExclusion]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CreateExclusion(self._session, self._host, self._interceptor) # type: ignore - - @property - def create_sink(self) -> Callable[ - [logging_config.CreateSinkRequest], - logging_config.LogSink]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CreateSink(self._session, self._host, self._interceptor) # type: ignore - - @property - def create_view(self) -> Callable[ - [logging_config.CreateViewRequest], - logging_config.LogView]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._CreateView(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_bucket(self) -> Callable[ - [logging_config.DeleteBucketRequest], - empty_pb2.Empty]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._DeleteBucket(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_exclusion(self) -> Callable[ - [logging_config.DeleteExclusionRequest], - empty_pb2.Empty]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._DeleteExclusion(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_sink(self) -> Callable[ - [logging_config.DeleteSinkRequest], - empty_pb2.Empty]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._DeleteSink(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_view(self) -> Callable[ - [logging_config.DeleteViewRequest], - empty_pb2.Empty]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._DeleteView(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_bucket(self) -> Callable[ - [logging_config.GetBucketRequest], - logging_config.LogBucket]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetBucket(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_cmek_settings(self) -> Callable[ - [logging_config.GetCmekSettingsRequest], - logging_config.CmekSettings]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetCmekSettings(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_exclusion(self) -> Callable[ - [logging_config.GetExclusionRequest], - logging_config.LogExclusion]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetExclusion(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_sink(self) -> Callable[ - [logging_config.GetSinkRequest], - logging_config.LogSink]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetSink(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_view(self) -> Callable[ - [logging_config.GetViewRequest], - logging_config.LogView]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetView(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_buckets(self) -> Callable[ - [logging_config.ListBucketsRequest], - logging_config.ListBucketsResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._ListBuckets(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_exclusions(self) -> Callable[ - [logging_config.ListExclusionsRequest], - logging_config.ListExclusionsResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListExclusions(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_sinks(self) -> Callable[ - [logging_config.ListSinksRequest], - logging_config.ListSinksResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListSinks(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_views(self) -> Callable[ - [logging_config.ListViewsRequest], - logging_config.ListViewsResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListViews(self._session, self._host, self._interceptor) # type: ignore - - @property - def undelete_bucket(self) -> Callable[ - [logging_config.UndeleteBucketRequest], - empty_pb2.Empty]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._UndeleteBucket(self._session, self._host, self._interceptor) # type: ignore - - @property - def update_bucket(self) -> Callable[ - [logging_config.UpdateBucketRequest], - logging_config.LogBucket]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._UpdateBucket(self._session, self._host, self._interceptor) # type: ignore - - @property - def update_cmek_settings(self) -> Callable[ - [logging_config.UpdateCmekSettingsRequest], - logging_config.CmekSettings]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._UpdateCmekSettings(self._session, self._host, self._interceptor) # type: ignore - - @property - def update_exclusion(self) -> Callable[ - [logging_config.UpdateExclusionRequest], - logging_config.LogExclusion]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._UpdateExclusion(self._session, self._host, self._interceptor) # type: ignore - - @property - def update_sink(self) -> Callable[ - [logging_config.UpdateSinkRequest], - logging_config.LogSink]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._UpdateSink(self._session, self._host, self._interceptor) # type: ignore - - @property - def update_view(self) -> Callable[ - [logging_config.UpdateViewRequest], - logging_config.LogView]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._UpdateView(self._session, self._host, self._interceptor) # type: ignore - - @property - def kind(self) -> str: - return "rest" - - def close(self): - self._session.close() - - -__all__=( - 'ConfigServiceV2RestTransport', -) diff --git a/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py b/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py index 698b72276c..8f947ba547 100755 --- a/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py +++ b/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py @@ -36,6 +36,7 @@ from google.cloud.logging_v2.services.logging_service_v2 import pagers from google.cloud.logging_v2.types import log_entry from google.cloud.logging_v2.types import logging +from google.longrunning import operations_pb2 # type: ignore from .transports.base import LoggingServiceV2Transport, DEFAULT_CLIENT_INFO from .transports.grpc_asyncio import LoggingServiceV2GrpcAsyncIOTransport from .client import LoggingServiceV2Client @@ -194,11 +195,11 @@ async def delete_log(self, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: - r"""Deletes all the log entries in a log. The log - reappears if it receives new entries. Log entries - written shortly before the delete operation might not be - deleted. Entries received after the delete operation - with a timestamp before the operation will be deleted. + r"""Deletes all the log entries in a log for the \_Default Log + Bucket. The log reappears if it receives new entries. Log + entries written shortly before the delete operation might not be + deleted. Entries received after the delete operation with a + timestamp before the operation will be deleted. .. code-block:: python @@ -229,16 +230,15 @@ async def sample_delete_log(): log_name (:class:`str`): Required. The resource name of the log to delete: - :: - - "projects/[PROJECT_ID]/logs/[LOG_ID]" - "organizations/[ORGANIZATION_ID]/logs/[LOG_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]" - "folders/[FOLDER_ID]/logs/[LOG_ID]" + - ``projects/[PROJECT_ID]/logs/[LOG_ID]`` + - ``organizations/[ORGANIZATION_ID]/logs/[LOG_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]`` + - ``folders/[FOLDER_ID]/logs/[LOG_ID]`` ``[LOG_ID]`` must be URL-encoded. For example, ``"projects/my-project-id/logs/syslog"``, - ``"organizations/1234567890/logs/cloudresourcemanager.googleapis.com%2Factivity"``. + ``"organizations/123/logs/cloudaudit.googleapis.com%2Factivity"``. + For more information about log names, see [LogEntry][google.logging.v2.LogEntry]. @@ -354,19 +354,17 @@ async def sample_write_log_entries(): to all log entries in ``entries`` that do not specify a value for ``log_name``: - :: - - "projects/[PROJECT_ID]/logs/[LOG_ID]" - "organizations/[ORGANIZATION_ID]/logs/[LOG_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]" - "folders/[FOLDER_ID]/logs/[LOG_ID]" + - ``projects/[PROJECT_ID]/logs/[LOG_ID]`` + - ``organizations/[ORGANIZATION_ID]/logs/[LOG_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]`` + - ``folders/[FOLDER_ID]/logs/[LOG_ID]`` ``[LOG_ID]`` must be URL-encoded. 
For example: :: "projects/my-project-id/logs/syslog" -                "organizations/1234567890/logs/cloudresourcemanager.googleapis.com%2Factivity" +                "organizations/123/logs/cloudaudit.googleapis.com%2Factivity" The permission ``logging.logEntries.create`` is needed on each project, organization, billing account, or @@ -422,17 +420,17 @@ async def sample_write_log_entries(): Log entries with timestamps that are more than the `logs retention -                period <https://cloud.google.com/logging/quotas>`__ -                in the past or more than 24 hours in the future will not -                be available when calling ``entries.list``. However, -                those log entries can still be `exported with +                period <https://cloud.google.com/logging/quotas>`__ in +                the past or more than 24 hours in the future will not be +                available when calling ``entries.list``. However, those +                log entries can still be `exported with LogSinks <https://cloud.google.com/logging/docs/api/tasks/exporting-logs>`__. To improve throughput and to avoid exceeding the `quota -                limit <https://cloud.google.com/logging/quotas>`__ -                for calls to ``entries.write``, you should try to -                include several log entries in this list, rather than -                calling this method for each individual log entry. +                limit <https://cloud.google.com/logging/quotas>`__ for +                calls to ``entries.write``, you should try to include +                several log entries in this list, rather than calling +                this method for each individual log entry. This corresponds to the ``entries`` field on the ``request`` instance; if ``request`` is provided, this @@ -545,35 +543,32 @@ async def sample_list_log_entries(): Required. Names of one or more parent resources from which to retrieve log entries: -                :: +                -  ``projects/[PROJECT_ID]`` +                -  ``organizations/[ORGANIZATION_ID]`` +                -  ``billingAccounts/[BILLING_ACCOUNT_ID]`` +                -  ``folders/[FOLDER_ID]`` -                    "projects/[PROJECT_ID]" -                    "organizations/[ORGANIZATION_ID]" -                    "billingAccounts/[BILLING_ACCOUNT_ID]" -                    "folders/[FOLDER_ID]" +                May alternatively be one or more views: -                May alternatively be one or more views -                projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] -                organization/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] -                billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] -                folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] +                -  ``projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` +                -  ``organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` +                -  ``billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` +                -  ``folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` Projects listed in the ``project_ids`` field are added -                to this list. +                to this list. A maximum of 100 resources may be +                specified in a single request. This corresponds to the ``resource_names`` field on the ``request`` instance; if ``request`` is provided, this should not be set. filter (:class:`str`): -                Optional. A filter that chooses which log entries to -                return. See `Advanced Logs -                Queries <https://cloud.google.com/logging/docs/view/advanced-queries>`__. -                Only log entries that match the filter are returned. An -                empty filter matches all log entries in the resources -                listed in ``resource_names``. Referencing a parent -                resource that is not listed in ``resource_names`` will -                cause the filter to return no results. The maximum -                length of the filter is 20000 characters. +                Optional. Only log entries that match the filter are +                returned. An empty filter matches all log entries in the +                resources listed in ``resource_names``. Referencing a +                parent resource that is not listed in ``resource_names`` +                will cause the filter to return no results. 
The maximum + length of a filter is 20,000 characters. This corresponds to the ``filter`` field on the ``request`` instance; if ``request`` is provided, this @@ -797,14 +792,12 @@ async def sample_list_logs(): request (Optional[Union[google.cloud.logging_v2.types.ListLogsRequest, dict]]): The request object. The parameters to ListLogs. parent (:class:`str`): - Required. The resource name that owns the logs: + Required. The resource name to list logs for: - :: - - "projects/[PROJECT_ID]" - "organizations/[ORGANIZATION_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]" - "folders/[FOLDER_ID]" + - ``projects/[PROJECT_ID]`` + - ``organizations/[ORGANIZATION_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]`` + - ``folders/[FOLDER_ID]`` This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -818,6 +811,7 @@ async def sample_list_logs(): Returns: google.cloud.logging_v2.services.logging_service_v2.pagers.ListLogsAsyncPager: Result returned from ListLogs. + Iterating over this object will yield results and resolve additional pages automatically. @@ -971,6 +965,158 @@ def request_generator(): # Done; return the response. return response + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. 
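The async mixin methods added here wrap the standard ``google.longrunning`` Operations surface. A minimal usage sketch, assuming a default-credentialed environment; the resource and operation names are illustrative:

.. code-block:: python

    import asyncio

    from google.cloud.logging_v2.services.logging_service_v2 import (
        LoggingServiceV2AsyncClient,
    )
    from google.longrunning import operations_pb2

    async def main():
        client = LoggingServiceV2AsyncClient()
        # "projects/my-project" is a placeholder resource name.
        ops = await client.list_operations(
            request=operations_pb2.ListOperationsRequest(name="projects/my-project")
        )
        for op in ops.operations:
            latest = await client.get_operation(
                request=operations_pb2.GetOperationRequest(name=op.name)
            )
            if not latest.done:
                # Best-effort; the server may not support cancellation.
                await client.cancel_operation(
                    request=operations_pb2.CancelOperationRequest(name=latest.name)
                )

    asyncio.run(main())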
+ await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + async def __aenter__(self) -> "LoggingServiceV2AsyncClient": return self diff --git a/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py b/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py index a77eb555ce..c90848e28c 100755 --- a/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py +++ b/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py @@ -39,10 +39,10 @@ from google.cloud.logging_v2.services.logging_service_v2 import pagers from google.cloud.logging_v2.types import log_entry from google.cloud.logging_v2.types import logging +from google.longrunning import operations_pb2 # type: ignore from .transports.base import LoggingServiceV2Transport, DEFAULT_CLIENT_INFO from .transports.grpc import LoggingServiceV2GrpcTransport from .transports.grpc_asyncio import LoggingServiceV2GrpcAsyncIOTransport -from .transports.rest import LoggingServiceV2RestTransport class LoggingServiceV2ClientMeta(type): @@ -55,7 +55,6 @@ class LoggingServiceV2ClientMeta(type): _transport_registry = OrderedDict() # type: Dict[str, Type[LoggingServiceV2Transport]] _transport_registry["grpc"] = LoggingServiceV2GrpcTransport _transport_registry["grpc_asyncio"] = LoggingServiceV2GrpcAsyncIOTransport - _transport_registry["rest"] = LoggingServiceV2RestTransport def get_transport_class(cls, label: Optional[str] = None, @@ -306,9 +305,6 @@ def __init__(self, *, transport (Union[str, LoggingServiceV2Transport]): The transport to use. If set to None, a transport is chosen automatically. - NOTE: "rest" transport functionality is currently in a - beta state (preview). We welcome your feedback via an - issue in this library's source repository. client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the client. It won't take effect if a ``transport`` instance is provided. (1) The ``api_endpoint`` property can be used to override the @@ -388,11 +384,11 @@ def delete_log(self, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: - r"""Deletes all the log entries in a log. The log - reappears if it receives new entries. Log entries - written shortly before the delete operation might not be - deleted. Entries received after the delete operation - with a timestamp before the operation will be deleted. + r"""Deletes all the log entries in a log for the \_Default Log + Bucket. The log reappears if it receives new entries. Log + entries written shortly before the delete operation might not be + deleted. Entries received after the delete operation with a + timestamp before the operation will be deleted. .. code-block:: python @@ -423,16 +419,15 @@ def sample_delete_log(): log_name (str): Required. The resource name of the log to delete: - :: - - "projects/[PROJECT_ID]/logs/[LOG_ID]" - "organizations/[ORGANIZATION_ID]/logs/[LOG_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]" - "folders/[FOLDER_ID]/logs/[LOG_ID]" + - ``projects/[PROJECT_ID]/logs/[LOG_ID]`` + - ``organizations/[ORGANIZATION_ID]/logs/[LOG_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]`` + - ``folders/[FOLDER_ID]/logs/[LOG_ID]`` ``[LOG_ID]`` must be URL-encoded. 
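The URL-encoding requirement can be met with the standard library; a small sketch that reproduces the encoded form shown in the examples that follow (the organization ID is a placeholder):

.. code-block:: python

    from urllib.parse import quote

    # Encode the "/" in the service-qualified log ID as %2F.
    log_id = quote("cloudaudit.googleapis.com/activity", safe="")
    log_name = f"organizations/123/logs/{log_id}"
    # -> "organizations/123/logs/cloudaudit.googleapis.com%2Factivity"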
For example, ``"projects/my-project-id/logs/syslog"``, - ``"organizations/1234567890/logs/cloudresourcemanager.googleapis.com%2Factivity"``. + ``"organizations/123/logs/cloudaudit.googleapis.com%2Factivity"``. + For more information about log names, see [LogEntry][google.logging.v2.LogEntry]. @@ -540,19 +535,17 @@ def sample_write_log_entries(): to all log entries in ``entries`` that do not specify a value for ``log_name``: - :: - - "projects/[PROJECT_ID]/logs/[LOG_ID]" - "organizations/[ORGANIZATION_ID]/logs/[LOG_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]" - "folders/[FOLDER_ID]/logs/[LOG_ID]" + - ``projects/[PROJECT_ID]/logs/[LOG_ID]`` + - ``organizations/[ORGANIZATION_ID]/logs/[LOG_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]`` + - ``folders/[FOLDER_ID]/logs/[LOG_ID]`` ``[LOG_ID]`` must be URL-encoded. For example: :: "projects/my-project-id/logs/syslog" - "organizations/1234567890/logs/cloudresourcemanager.googleapis.com%2Factivity" + "organizations/123/logs/cloudaudit.googleapis.com%2Factivity" The permission ``logging.logEntries.create`` is needed on each project, organization, billing account, or @@ -608,17 +601,17 @@ def sample_write_log_entries(): Log entries with timestamps that are more than the `logs retention - period `__ - in the past or more than 24 hours in the future will not - be available when calling ``entries.list``. However, - those log entries can still be `exported with + period `__ in + the past or more than 24 hours in the future will not be + available when calling ``entries.list``. However, those + log entries can still be `exported with LogSinks `__. To improve throughput and to avoid exceeding the `quota - limit `__ - for calls to ``entries.write``, you should try to - include several log entries in this list, rather than - calling this method for each individual log entry. + limit `__ for + calls to ``entries.write``, you should try to include + several log entries in this list, rather than calling + this method for each individual log entry. This corresponds to the ``entries`` field on the ``request`` instance; if ``request`` is provided, this @@ -722,35 +715,32 @@ def sample_list_log_entries(): Required. Names of one or more parent resources from which to retrieve log entries: - :: + - ``projects/[PROJECT_ID]`` + - ``organizations/[ORGANIZATION_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]`` + - ``folders/[FOLDER_ID]`` - "projects/[PROJECT_ID]" - "organizations/[ORGANIZATION_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]" - "folders/[FOLDER_ID]" + May alternatively be one or more views: - May alternatively be one or more views - projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] - organization/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] - billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] - folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] + - ``projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` Projects listed in the ``project_ids`` field are added - to this list. + to this list. A maximum of 100 resources may be + specified in a single request. 
This corresponds to the ``resource_names`` field on the ``request`` instance; if ``request`` is provided, this should not be set. filter (str): - Optional. A filter that chooses which log entries to - return. See `Advanced Logs - Queries `__. - Only log entries that match the filter are returned. An - empty filter matches all log entries in the resources - listed in ``resource_names``. Referencing a parent - resource that is not listed in ``resource_names`` will - cause the filter to return no results. The maximum - length of the filter is 20000 characters. + Optional. Only log entries that match the filter are + returned. An empty filter matches all log entries in the + resources listed in ``resource_names``. Referencing a + parent resource that is not listed in ``resource_names`` + will cause the filter to return no results. The maximum + length of a filter is 20,000 characters. This corresponds to the ``filter`` field on the ``request`` instance; if ``request`` is provided, this @@ -959,14 +949,12 @@ def sample_list_logs(): request (Union[google.cloud.logging_v2.types.ListLogsRequest, dict]): The request object. The parameters to ListLogs. parent (str): - Required. The resource name that owns the logs: - - :: + Required. The resource name to list logs for: - "projects/[PROJECT_ID]" - "organizations/[ORGANIZATION_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]" - "folders/[FOLDER_ID]" + - ``projects/[PROJECT_ID]`` + - ``organizations/[ORGANIZATION_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]`` + - ``folders/[FOLDER_ID]`` This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -980,6 +968,7 @@ def sample_list_logs(): Returns: google.cloud.logging_v2.services.logging_service_v2.pagers.ListLogsPager: Result returned from ListLogs. + Iterating over this object will yield results and resolve additional pages automatically. @@ -1126,6 +1115,158 @@ def __exit__(self, type, value, traceback): """ self.transport.close() + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
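For reference, the routing header these methods attach is a single metadata entry; a sketch with a placeholder operation name:

.. code-block:: python

    from google.api_core import gapic_v1

    key, value = gapic_v1.routing_header.to_grpc_metadata(
        (("name", "projects/my-project/operations/op-123"),)
    )
    # key   == "x-goog-request-params"
    # value == "name=projects/my-project/operations/op-123"
    # ("/" is deliberately left unescaped; other reserved
    # characters are percent-encoded.)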
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. 
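Because cancellation is best-effort and may be unimplemented server-side, a caller may want to tolerate ``UNIMPLEMENTED``; a sketch with a placeholder operation name:

.. code-block:: python

    from google.api_core import exceptions as core_exceptions
    from google.cloud.logging_v2.services.logging_service_v2 import (
        LoggingServiceV2Client,
    )
    from google.longrunning import operations_pb2

    client = LoggingServiceV2Client()
    try:
        client.cancel_operation(
            request=operations_pb2.CancelOperationRequest(
                name="projects/my-project/operations/op-123",  # placeholder
            )
        )
    except core_exceptions.MethodNotImplemented:
        pass  # Server does not expose Operations.CancelOperation.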
+ rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + diff --git a/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/__init__.py b/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/__init__.py index c1d66e378a..e1fb42a460 100755 --- a/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/__init__.py +++ b/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/__init__.py @@ -19,20 +19,15 @@ from .base import LoggingServiceV2Transport from .grpc import LoggingServiceV2GrpcTransport from .grpc_asyncio import LoggingServiceV2GrpcAsyncIOTransport -from .rest import LoggingServiceV2RestTransport -from .rest import LoggingServiceV2RestInterceptor # Compile a registry of transports. _transport_registry = OrderedDict() # type: Dict[str, Type[LoggingServiceV2Transport]] _transport_registry['grpc'] = LoggingServiceV2GrpcTransport _transport_registry['grpc_asyncio'] = LoggingServiceV2GrpcAsyncIOTransport -_transport_registry['rest'] = LoggingServiceV2RestTransport __all__ = ( 'LoggingServiceV2Transport', 'LoggingServiceV2GrpcTransport', 'LoggingServiceV2GrpcAsyncIOTransport', - 'LoggingServiceV2RestTransport', - 'LoggingServiceV2RestInterceptor', ) diff --git a/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py b/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py index 637083ce90..649b606217 100755 --- a/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py +++ b/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py @@ -27,6 +27,7 @@ from google.oauth2 import service_account # type: ignore from google.cloud.logging_v2.types import logging +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) @@ -261,6 +262,33 @@ def tail_log_entries(self) -> Callable[ ]]: raise NotImplementedError() + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[operations_pb2.ListOperationsResponse, Awaitable[operations_pb2.ListOperationsResponse]], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> Callable[ + [operations_pb2.CancelOperationRequest], + None, + ]: + raise NotImplementedError() + @property def kind(self) -> str: raise NotImplementedError() diff --git a/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py b/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py index fdaa104c76..b24dd885af 100755 --- a/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py +++ b/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py @@ -25,6 +25,7 @@ import grpc # type: ignore from google.cloud.logging_v2.types import logging +from google.longrunning import operations_pb2 # type: ignore 
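With the REST transport removed, transport resolution goes through the two remaining registry entries; a sketch of how the labels resolve:

.. code-block:: python

    from google.cloud.logging_v2.services.logging_service_v2 import (
        LoggingServiceV2Client,
    )

    # With no label, the first registered transport ("grpc") is returned;
    # an unregistered label such as "rest" now raises KeyError.
    default_cls = LoggingServiceV2Client.get_transport_class()
    asyncio_cls = LoggingServiceV2Client.get_transport_class("grpc_asyncio")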
from google.protobuf import empty_pb2 # type: ignore from .base import LoggingServiceV2Transport, DEFAULT_CLIENT_INFO @@ -232,11 +233,11 @@ def delete_log(self) -> Callable[ empty_pb2.Empty]: r"""Return a callable for the delete log method over gRPC. - Deletes all the log entries in a log. The log - reappears if it receives new entries. Log entries - written shortly before the delete operation might not be - deleted. Entries received after the delete operation - with a timestamp before the operation will be deleted. + Deletes all the log entries in a log for the \_Default Log + Bucket. The log reappears if it receives new entries. Log + entries written shortly before the delete operation might not be + deleted. Entries received after the delete operation with a + timestamp before the operation will be deleted. Returns: Callable[[~.DeleteLogRequest], @@ -404,6 +405,60 @@ def tail_log_entries(self) -> Callable[ def close(self): self.grpc_channel.close() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: + r"""Return a callable for the list_operations method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
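The stub-caching idiom these transport properties rely on, reduced to a minimal standalone sketch (the class and method names are illustrative, not part of the generated surface):

.. code-block:: python

    class _StubCache:
        """Create each gRPC stub once and reuse it on later access."""

        def __init__(self, channel):
            self._channel = channel
            self._stubs = {}

        def unary_unary(self, method, serializer, deserializer):
            if method not in self._stubs:
                self._stubs[method] = self._channel.unary_unary(
                    method,
                    request_serializer=serializer,
                    response_deserializer=deserializer,
                )
            return self._stubs[method]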
+ if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + @property def kind(self) -> str: return "grpc" diff --git a/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py b/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py index 0f1aaa2223..9454a1f27b 100755 --- a/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py +++ b/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py @@ -25,6 +25,7 @@ from grpc.experimental import aio # type: ignore from google.cloud.logging_v2.types import logging +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from .base import LoggingServiceV2Transport, DEFAULT_CLIENT_INFO from .grpc import LoggingServiceV2GrpcTransport @@ -235,11 +236,11 @@ def delete_log(self) -> Callable[ Awaitable[empty_pb2.Empty]]: r"""Return a callable for the delete log method over gRPC. - Deletes all the log entries in a log. The log - reappears if it receives new entries. Log entries - written shortly before the delete operation might not be - deleted. Entries received after the delete operation - with a timestamp before the operation will be deleted. + Deletes all the log entries in a log for the \_Default Log + Bucket. The log reappears if it receives new entries. Log + entries written shortly before the delete operation might not be + deleted. Entries received after the delete operation with a + timestamp before the operation will be deleted. Returns: Callable[[~.DeleteLogRequest], @@ -407,6 +408,60 @@ def tail_log_entries(self) -> Callable[ def close(self): return self.grpc_channel.close() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: + r"""Return a callable for the list_operations method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + __all__ = ( 'LoggingServiceV2GrpcAsyncIOTransport', diff --git a/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/rest.py b/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/rest.py deleted file mode 100755 index b77514b574..0000000000 --- a/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/rest.py +++ /dev/null @@ -1,769 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# - -from google.auth.transport.requests import AuthorizedSession # type: ignore -import json # type: ignore -import grpc # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.api_core import exceptions as core_exceptions -from google.api_core import retry as retries -from google.api_core import rest_helpers -from google.api_core import rest_streaming -from google.api_core import path_template -from google.api_core import gapic_v1 - -from google.protobuf import json_format -from requests import __version__ as requests_version -import dataclasses -import re -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union -import warnings - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore - - -from google.cloud.logging_v2.types import logging -from google.protobuf import empty_pb2 # type: ignore - -from .base import LoggingServiceV2Transport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, - grpc_version=None, - rest_version=requests_version, -) - - -class LoggingServiceV2RestInterceptor: - """Interceptor for LoggingServiceV2. - - Interceptors are used to manipulate requests, request metadata, and responses - in arbitrary ways. - Example use cases include: - * Logging - * Verifying requests according to service or custom semantics - * Stripping extraneous information from responses - - These use cases and more can be enabled by injecting an - instance of a custom subclass when constructing the LoggingServiceV2RestTransport. - - .. code-block:: python - class MyCustomLoggingServiceV2Interceptor(LoggingServiceV2RestInterceptor): - def pre_delete_log(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def pre_list_log_entries(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_log_entries(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_logs(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_logs(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_monitored_resource_descriptors(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_monitored_resource_descriptors(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_write_log_entries(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_write_log_entries(self, response): - logging.log(f"Received response: {response}") - return response - - transport = LoggingServiceV2RestTransport(interceptor=MyCustomLoggingServiceV2Interceptor()) - client = LoggingServiceV2Client(transport=transport) - - - """ - def pre_delete_log(self, request: logging.DeleteLogRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[logging.DeleteLogRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for delete_log - - Override in a subclass to manipulate the request or metadata - before they are sent to the LoggingServiceV2 server. 
- """ - return request, metadata - - def pre_list_log_entries(self, request: logging.ListLogEntriesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[logging.ListLogEntriesRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for list_log_entries - - Override in a subclass to manipulate the request or metadata - before they are sent to the LoggingServiceV2 server. - """ - return request, metadata - - def post_list_log_entries(self, response: logging.ListLogEntriesResponse) -> logging.ListLogEntriesResponse: - """Post-rpc interceptor for list_log_entries - - Override in a subclass to manipulate the response - after it is returned by the LoggingServiceV2 server but before - it is returned to user code. - """ - return response - def pre_list_logs(self, request: logging.ListLogsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[logging.ListLogsRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for list_logs - - Override in a subclass to manipulate the request or metadata - before they are sent to the LoggingServiceV2 server. - """ - return request, metadata - - def post_list_logs(self, response: logging.ListLogsResponse) -> logging.ListLogsResponse: - """Post-rpc interceptor for list_logs - - Override in a subclass to manipulate the response - after it is returned by the LoggingServiceV2 server but before - it is returned to user code. - """ - return response - def pre_list_monitored_resource_descriptors(self, request: logging.ListMonitoredResourceDescriptorsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[logging.ListMonitoredResourceDescriptorsRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for list_monitored_resource_descriptors - - Override in a subclass to manipulate the request or metadata - before they are sent to the LoggingServiceV2 server. - """ - return request, metadata - - def post_list_monitored_resource_descriptors(self, response: logging.ListMonitoredResourceDescriptorsResponse) -> logging.ListMonitoredResourceDescriptorsResponse: - """Post-rpc interceptor for list_monitored_resource_descriptors - - Override in a subclass to manipulate the response - after it is returned by the LoggingServiceV2 server but before - it is returned to user code. - """ - return response - def pre_write_log_entries(self, request: logging.WriteLogEntriesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[logging.WriteLogEntriesRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for write_log_entries - - Override in a subclass to manipulate the request or metadata - before they are sent to the LoggingServiceV2 server. - """ - return request, metadata - - def post_write_log_entries(self, response: logging.WriteLogEntriesResponse) -> logging.WriteLogEntriesResponse: - """Post-rpc interceptor for write_log_entries - - Override in a subclass to manipulate the response - after it is returned by the LoggingServiceV2 server but before - it is returned to user code. - """ - return response - - -@dataclasses.dataclass -class LoggingServiceV2RestStub: - _session: AuthorizedSession - _host: str - _interceptor: LoggingServiceV2RestInterceptor - - -class LoggingServiceV2RestTransport(LoggingServiceV2Transport): - """REST backend transport for LoggingServiceV2. - - Service for ingesting and querying logs. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. 
- - It sends JSON representations of protocol buffers over HTTP/1.1 - - NOTE: This REST transport functionality is currently in a beta - state (preview). We welcome your feedback via an issue in this - library's source repository. Thank you! - """ - - def __init__(self, *, - host: str = 'logging.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - client_cert_source_for_mtls: Optional[Callable[[ - ], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - interceptor: Optional[LoggingServiceV2RestInterceptor] = None, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - NOTE: This REST transport functionality is currently in a beta - state (preview). We welcome your feedback via a GitHub issue in - this library's repository. Thank you! - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client - certificate to configure mutual TLS HTTP channel. It is ignored - if ``channel`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. - """ - # Run the base constructor - # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
- # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the - # credentials object - maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) - if maybe_url_match is None: - raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER - - url_match_items = maybe_url_match.groupdict() - - host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host - - super().__init__( - host=host, - credentials=credentials, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience - ) - self._session = AuthorizedSession( - self._credentials, default_host=self.DEFAULT_HOST) - if client_cert_source_for_mtls: - self._session.configure_mtls_channel(client_cert_source_for_mtls) - self._interceptor = interceptor or LoggingServiceV2RestInterceptor() - self._prep_wrapped_messages(client_info) - - class _DeleteLog(LoggingServiceV2RestStub): - def __hash__(self): - return hash("DeleteLog") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: logging.DeleteLogRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ): - r"""Call the delete log method over HTTP. - - Args: - request (~.logging.DeleteLogRequest): - The request object. The parameters to DeleteLog. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v2/{log_name=projects/*/logs/*}', - }, -{ - 'method': 'delete', - 'uri': '/v2/{log_name=*/*/logs/*}', - }, -{ - 'method': 'delete', - 'uri': '/v2/{log_name=organizations/*/logs/*}', - }, -{ - 'method': 'delete', - 'uri': '/v2/{log_name=folders/*/logs/*}', - }, -{ - 'method': 'delete', - 'uri': '/v2/{log_name=billingAccounts/*/logs/*}', - }, - ] - request, metadata = self._interceptor.pre_delete_log(request, metadata) - pb_request = logging.DeleteLogRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=False, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - class _ListLogEntries(LoggingServiceV2RestStub): - def __hash__(self): - return hash("ListLogEntries") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: logging.ListLogEntriesRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> logging.ListLogEntriesResponse: - r"""Call the list log entries method over HTTP. - - Args: - request (~.logging.ListLogEntriesRequest): - The request object. The parameters to ``ListLogEntries``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.logging.ListLogEntriesResponse: - Result returned from ``ListLogEntries``. - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v2/entries:list', - 'body': '*', - }, - ] - request, metadata = self._interceptor.pre_list_log_entries(request, metadata) - pb_request = logging.ListLogEntriesRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=False - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=False, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = logging.ListLogEntriesResponse() - pb_resp = logging.ListLogEntriesResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_log_entries(resp) - return resp - - class _ListLogs(LoggingServiceV2RestStub): - def __hash__(self): - return hash("ListLogs") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: logging.ListLogsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> logging.ListLogsResponse: - r"""Call the list logs method over HTTP. - - Args: - request (~.logging.ListLogsRequest): - The request object. The parameters to ListLogs. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.logging.ListLogsResponse: - Result returned from ListLogs. - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v2/{parent=*/*}/logs', - }, -{ - 'method': 'get', - 'uri': '/v2/{parent=projects/*}/logs', - }, -{ - 'method': 'get', - 'uri': '/v2/{parent=organizations/*}/logs', - }, -{ - 'method': 'get', - 'uri': '/v2/{parent=folders/*}/logs', - }, -{ - 'method': 'get', - 'uri': '/v2/{parent=billingAccounts/*}/logs', - }, - ] - request, metadata = self._interceptor.pre_list_logs(request, metadata) - pb_request = logging.ListLogsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=False, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = logging.ListLogsResponse() - pb_resp = logging.ListLogsResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_logs(resp) - return resp - - class _ListMonitoredResourceDescriptors(LoggingServiceV2RestStub): - def __hash__(self): - return hash("ListMonitoredResourceDescriptors") - - def __call__(self, - request: logging.ListMonitoredResourceDescriptorsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> logging.ListMonitoredResourceDescriptorsResponse: - r"""Call the list monitored resource - descriptors method over HTTP. - - Args: - request (~.logging.ListMonitoredResourceDescriptorsRequest): - The request object. The parameters to - ListMonitoredResourceDescriptors - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.logging.ListMonitoredResourceDescriptorsResponse: - Result returned from - ListMonitoredResourceDescriptors. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v2/monitoredResourceDescriptors', - }, - ] - request, metadata = self._interceptor.pre_list_monitored_resource_descriptors(request, metadata) - pb_request = logging.ListMonitoredResourceDescriptorsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=False, - )) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = logging.ListMonitoredResourceDescriptorsResponse() - pb_resp = logging.ListMonitoredResourceDescriptorsResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_monitored_resource_descriptors(resp) - return resp - - class _TailLogEntries(LoggingServiceV2RestStub): - def __hash__(self): - return hash("TailLogEntries") - - def __call__(self, - request: logging.TailLogEntriesRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> rest_streaming.ResponseIterator: - raise NotImplementedError( - "Method TailLogEntries is not available over REST transport" - ) - class _WriteLogEntries(LoggingServiceV2RestStub): - def __hash__(self): - return hash("WriteLogEntries") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: logging.WriteLogEntriesRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> logging.WriteLogEntriesResponse: - r"""Call the write log entries method over HTTP. - - Args: - request (~.logging.WriteLogEntriesRequest): - The request object. The parameters to WriteLogEntries. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.logging.WriteLogEntriesResponse: - Result returned from WriteLogEntries. 
- """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v2/entries:write', - 'body': '*', - }, - ] - request, metadata = self._interceptor.pre_write_log_entries(request, metadata) - pb_request = logging.WriteLogEntriesRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=False - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=False, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = logging.WriteLogEntriesResponse() - pb_resp = logging.WriteLogEntriesResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_write_log_entries(resp) - return resp - - @property - def delete_log(self) -> Callable[ - [logging.DeleteLogRequest], - empty_pb2.Empty]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._DeleteLog(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_log_entries(self) -> Callable[ - [logging.ListLogEntriesRequest], - logging.ListLogEntriesResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListLogEntries(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_logs(self) -> Callable[ - [logging.ListLogsRequest], - logging.ListLogsResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListLogs(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_monitored_resource_descriptors(self) -> Callable[ - [logging.ListMonitoredResourceDescriptorsRequest], - logging.ListMonitoredResourceDescriptorsResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListMonitoredResourceDescriptors(self._session, self._host, self._interceptor) # type: ignore - - @property - def tail_log_entries(self) -> Callable[ - [logging.TailLogEntriesRequest], - logging.TailLogEntriesResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._TailLogEntries(self._session, self._host, self._interceptor) # type: ignore - - @property - def write_log_entries(self) -> Callable[ - [logging.WriteLogEntriesRequest], - logging.WriteLogEntriesResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._WriteLogEntries(self._session, self._host, self._interceptor) # type: ignore - - @property - def kind(self) -> str: - return "rest" - - def close(self): - self._session.close() - - -__all__=( - 'LoggingServiceV2RestTransport', -) diff --git a/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py b/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py index ffcebb38ae..5d160dc58b 100755 --- a/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py +++ b/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py @@ -36,6 +36,7 @@ from google.api import metric_pb2 # type: ignore from google.cloud.logging_v2.services.metrics_service_v2 import pagers from google.cloud.logging_v2.types import logging_metrics +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore from .transports.base import MetricsServiceV2Transport, DEFAULT_CLIENT_INFO from .transports.grpc_asyncio import MetricsServiceV2GrpcAsyncIOTransport @@ -247,6 +248,7 @@ async def sample_list_log_metrics(): Returns: google.cloud.logging_v2.services.metrics_service_v2.pagers.ListLogMetricsAsyncPager: Result returned from ListLogMetrics. + Iterating over this object will yield results and resolve additional pages automatically. @@ -372,6 +374,7 @@ async def sample_get_log_metric(): value of the metric is the number of log entries that match a logs filter in a given time interval. + Logs-based metrics can also be used to extract values from logs and create a distribution of the values. The @@ -509,6 +512,7 @@ async def sample_create_log_metric(): value of the metric is the number of log entries that match a logs filter in a given time interval. + Logs-based metrics can also be used to extract values from logs and create a distribution of the values. The @@ -639,6 +643,7 @@ async def sample_update_log_metric(): value of the metric is the number of log entries that match a logs filter in a given time interval. + Logs-based metrics can also be used to extract values from logs and create a distribution of the values. The @@ -799,6 +804,158 @@ async def sample_delete_log_metric(): metadata=metadata, ) + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + async def __aenter__(self) -> "MetricsServiceV2AsyncClient": return self diff --git a/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py b/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py index 09e173e72d..50ec7c3995 100755 --- a/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py +++ b/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py @@ -39,11 +39,11 @@ from google.api import metric_pb2 # type: ignore from google.cloud.logging_v2.services.metrics_service_v2 import pagers from google.cloud.logging_v2.types import logging_metrics +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore from .transports.base import MetricsServiceV2Transport, DEFAULT_CLIENT_INFO from .transports.grpc import MetricsServiceV2GrpcTransport from .transports.grpc_asyncio import MetricsServiceV2GrpcAsyncIOTransport -from .transports.rest import MetricsServiceV2RestTransport class MetricsServiceV2ClientMeta(type): @@ -56,7 +56,6 @@ class MetricsServiceV2ClientMeta(type): _transport_registry = OrderedDict() # type: Dict[str, Type[MetricsServiceV2Transport]] _transport_registry["grpc"] = MetricsServiceV2GrpcTransport _transport_registry["grpc_asyncio"] = MetricsServiceV2GrpcAsyncIOTransport - _transport_registry["rest"] = MetricsServiceV2RestTransport def get_transport_class(cls, label: Optional[str] = None, @@ -307,9 +306,6 @@ def __init__(self, *, transport (Union[str, MetricsServiceV2Transport]): The transport to use. If set to None, a transport is chosen automatically. - NOTE: "rest" transport functionality is currently in a - beta state (preview). We welcome your feedback via an - issue in this library's source repository. client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the client. It won't take effect if a ``transport`` instance is provided. (1) The ``api_endpoint`` property can be used to override the @@ -441,6 +437,7 @@ def sample_list_log_metrics(): Returns: google.cloud.logging_v2.services.metrics_service_v2.pagers.ListLogMetricsPager: Result returned from ListLogMetrics. + Iterating over this object will yield results and resolve additional pages automatically. @@ -558,6 +555,7 @@ def sample_get_log_metric(): value of the metric is the number of log entries that match a logs filter in a given time interval. + Logs-based metrics can also be used to extract values from logs and create a distribution of the values. 
The @@ -687,6 +685,7 @@ def sample_create_log_metric(): value of the metric is the number of log entries that match a logs filter in a given time interval. + Logs-based metrics can also be used to extract values from logs and create a distribution of the values. The @@ -817,6 +816,7 @@ def sample_update_log_metric(): value of the metric is the number of log entries that match a logs filter in a given time interval. + Logs-based metrics can also be used to extract values from logs and create a distribution of the values. The @@ -974,6 +974,158 @@ def __exit__(self, type, value, traceback): """ self.transport.close() + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
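The ``get_operation`` body resumes below. Taken together, the three mixin methods added to this client expose the ``google.longrunning`` Operations surface directly on the generated client. A minimal usage sketch, assuming application-default credentials are available and using a hypothetical resource name (neither is part of this diff):

    from google.cloud.logging_v2.services.metrics_service_v2 import MetricsServiceV2Client
    from google.longrunning import operations_pb2

    client = MetricsServiceV2Client()

    # List operations under a hypothetical parent resource; the "name"
    # field is also what the generated code copies into the routing header.
    response = client.list_operations(
        request=operations_pb2.ListOperationsRequest(
            name="projects/my-project/locations/global",
        )
    )
    for operation in response.operations:
        # Poll one operation's latest state by resource name.
        op = client.get_operation(
            request=operations_pb2.GetOperationRequest(name=operation.name)
        )
        print(op.name, op.done)

A plain dict with the same keys is also accepted for ``request``; the generated methods coerce it via keyword expansion, as the comments in this diff note.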
+ rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + diff --git a/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/__init__.py b/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/__init__.py index 7f421d9059..07d010436a 100755 --- a/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/__init__.py +++ b/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/__init__.py @@ -19,20 +19,15 @@ from .base import MetricsServiceV2Transport from .grpc import MetricsServiceV2GrpcTransport from .grpc_asyncio import MetricsServiceV2GrpcAsyncIOTransport -from .rest import MetricsServiceV2RestTransport -from .rest import MetricsServiceV2RestInterceptor # Compile a registry of transports. 
_transport_registry = OrderedDict() # type: Dict[str, Type[MetricsServiceV2Transport]] _transport_registry['grpc'] = MetricsServiceV2GrpcTransport _transport_registry['grpc_asyncio'] = MetricsServiceV2GrpcAsyncIOTransport -_transport_registry['rest'] = MetricsServiceV2RestTransport __all__ = ( 'MetricsServiceV2Transport', 'MetricsServiceV2GrpcTransport', 'MetricsServiceV2GrpcAsyncIOTransport', - 'MetricsServiceV2RestTransport', - 'MetricsServiceV2RestInterceptor', ) diff --git a/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py b/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py index a67ba7fd89..5ad58024fb 100755 --- a/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py +++ b/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py @@ -27,6 +27,7 @@ from google.oauth2 import service_account # type: ignore from google.cloud.logging_v2.types import logging_metrics +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) @@ -231,6 +232,33 @@ def delete_log_metric(self) -> Callable[ ]]: raise NotImplementedError() + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[operations_pb2.ListOperationsResponse, Awaitable[operations_pb2.ListOperationsResponse]], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> Callable[ + [operations_pb2.CancelOperationRequest], + None, + ]: + raise NotImplementedError() + @property def kind(self) -> str: raise NotImplementedError() diff --git a/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py b/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py index 1111621606..d6cf4e2522 100755 --- a/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py +++ b/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py @@ -25,6 +25,7 @@ import grpc # type: ignore from google.cloud.logging_v2.types import logging_metrics +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from .base import MetricsServiceV2Transport, DEFAULT_CLIENT_INFO @@ -359,6 +360,60 @@ def delete_log_metric(self) -> Callable[ def close(self): self.grpc_channel.close() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
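The stub body continues below. One detail worth calling out: each of these properties memoizes its stub in ``self._stubs`` on first access, so repeated lookups reuse a single low-level callable per channel. A hedged check of that behavior, using anonymous test credentials so no real credentials or network calls are needed at construction time:

    from google.auth import credentials as ga_credentials
    from google.cloud.logging_v2.services.metrics_service_v2.transports import (
        MetricsServiceV2GrpcTransport,
    )

    transport = MetricsServiceV2GrpcTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # First access creates the stub; the second returns the cached object.
    assert transport.cancel_operation is transport.cancel_operation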
+ if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: + r"""Return a callable for the list_operations method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + @property def kind(self) -> str: return "grpc" diff --git a/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py b/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py index 3ddcc80df5..1b81c982e6 100755 --- a/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py +++ b/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py @@ -25,6 +25,7 @@ from grpc.experimental import aio # type: ignore from google.cloud.logging_v2.types import logging_metrics +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from .base import MetricsServiceV2Transport, DEFAULT_CLIENT_INFO from .grpc import MetricsServiceV2GrpcTransport @@ -362,6 +363,60 @@ def delete_log_metric(self) -> Callable[ def close(self): return self.grpc_channel.close() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: + r"""Return a callable for the list_operations method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + __all__ = ( 'MetricsServiceV2GrpcAsyncIOTransport', diff --git a/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/rest.py b/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/rest.py deleted file mode 100755 index c68edad36d..0000000000 --- a/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/rest.py +++ /dev/null @@ -1,753 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
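The file deletion that follows, together with the registry edits earlier in this diff, removes "rest" as a valid transport label for MetricsServiceV2. A hedged sketch of what remains selectable after the change (anonymous credentials are used only to sidestep the ADC lookup):

    from google.auth import credentials as ga_credentials
    from google.cloud.logging_v2.services.metrics_service_v2 import MetricsServiceV2Client

    creds = ga_credentials.AnonymousCredentials()

    # Still valid: the default (gRPC) and the labels kept in the registry.
    client = MetricsServiceV2Client(credentials=creds)
    client = MetricsServiceV2Client(credentials=creds, transport="grpc")

    # No longer valid after this change: the registry lookup for the label
    # fails, since "rest" was removed from _transport_registry.
    # MetricsServiceV2Client(credentials=creds, transport="rest")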
-# - -from google.auth.transport.requests import AuthorizedSession # type: ignore -import json # type: ignore -import grpc # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.api_core import exceptions as core_exceptions -from google.api_core import retry as retries -from google.api_core import rest_helpers -from google.api_core import rest_streaming -from google.api_core import path_template -from google.api_core import gapic_v1 - -from google.protobuf import json_format -from requests import __version__ as requests_version -import dataclasses -import re -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union -import warnings - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore - - -from google.cloud.logging_v2.types import logging_metrics -from google.protobuf import empty_pb2 # type: ignore - -from .base import MetricsServiceV2Transport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, - grpc_version=None, - rest_version=requests_version, -) - - -class MetricsServiceV2RestInterceptor: - """Interceptor for MetricsServiceV2. - - Interceptors are used to manipulate requests, request metadata, and responses - in arbitrary ways. - Example use cases include: - * Logging - * Verifying requests according to service or custom semantics - * Stripping extraneous information from responses - - These use cases and more can be enabled by injecting an - instance of a custom subclass when constructing the MetricsServiceV2RestTransport. - - .. code-block:: python - class MyCustomMetricsServiceV2Interceptor(MetricsServiceV2RestInterceptor): - def pre_create_log_metric(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_create_log_metric(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_delete_log_metric(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def pre_get_log_metric(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_log_metric(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_log_metrics(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_log_metrics(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_update_log_metric(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_update_log_metric(self, response): - logging.log(f"Received response: {response}") - return response - - transport = MetricsServiceV2RestTransport(interceptor=MyCustomMetricsServiceV2Interceptor()) - client = MetricsServiceV2Client(transport=transport) - - - """ - def pre_create_log_metric(self, request: logging_metrics.CreateLogMetricRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[logging_metrics.CreateLogMetricRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for create_log_metric - - Override in a subclass to manipulate the request or metadata - before they are sent to the MetricsServiceV2 server. 
- """ - return request, metadata - - def post_create_log_metric(self, response: logging_metrics.LogMetric) -> logging_metrics.LogMetric: - """Post-rpc interceptor for create_log_metric - - Override in a subclass to manipulate the response - after it is returned by the MetricsServiceV2 server but before - it is returned to user code. - """ - return response - def pre_delete_log_metric(self, request: logging_metrics.DeleteLogMetricRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[logging_metrics.DeleteLogMetricRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for delete_log_metric - - Override in a subclass to manipulate the request or metadata - before they are sent to the MetricsServiceV2 server. - """ - return request, metadata - - def pre_get_log_metric(self, request: logging_metrics.GetLogMetricRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[logging_metrics.GetLogMetricRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_log_metric - - Override in a subclass to manipulate the request or metadata - before they are sent to the MetricsServiceV2 server. - """ - return request, metadata - - def post_get_log_metric(self, response: logging_metrics.LogMetric) -> logging_metrics.LogMetric: - """Post-rpc interceptor for get_log_metric - - Override in a subclass to manipulate the response - after it is returned by the MetricsServiceV2 server but before - it is returned to user code. - """ - return response - def pre_list_log_metrics(self, request: logging_metrics.ListLogMetricsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[logging_metrics.ListLogMetricsRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for list_log_metrics - - Override in a subclass to manipulate the request or metadata - before they are sent to the MetricsServiceV2 server. - """ - return request, metadata - - def post_list_log_metrics(self, response: logging_metrics.ListLogMetricsResponse) -> logging_metrics.ListLogMetricsResponse: - """Post-rpc interceptor for list_log_metrics - - Override in a subclass to manipulate the response - after it is returned by the MetricsServiceV2 server but before - it is returned to user code. - """ - return response - def pre_update_log_metric(self, request: logging_metrics.UpdateLogMetricRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[logging_metrics.UpdateLogMetricRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for update_log_metric - - Override in a subclass to manipulate the request or metadata - before they are sent to the MetricsServiceV2 server. - """ - return request, metadata - - def post_update_log_metric(self, response: logging_metrics.LogMetric) -> logging_metrics.LogMetric: - """Post-rpc interceptor for update_log_metric - - Override in a subclass to manipulate the response - after it is returned by the MetricsServiceV2 server but before - it is returned to user code. - """ - return response - - -@dataclasses.dataclass -class MetricsServiceV2RestStub: - _session: AuthorizedSession - _host: str - _interceptor: MetricsServiceV2RestInterceptor - - -class MetricsServiceV2RestTransport(MetricsServiceV2Transport): - """REST backend transport for MetricsServiceV2. - - Service for configuring logs-based metrics. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends JSON representations of protocol buffers over HTTP/1.1 - - NOTE: This REST transport functionality is currently in a beta - state (preview). 
We welcome your feedback via an issue in this - library's source repository. Thank you! - """ - - def __init__(self, *, - host: str = 'logging.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - client_cert_source_for_mtls: Optional[Callable[[ - ], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - interceptor: Optional[MetricsServiceV2RestInterceptor] = None, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - NOTE: This REST transport functionality is currently in a beta - state (preview). We welcome your feedback via a GitHub issue in - this library's repository. Thank you! - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client - certificate to configure mutual TLS HTTP channel. It is ignored - if ``channel`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. - """ - # Run the base constructor - # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
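The constructor body that follows normalizes ``host``: a bare hostname gains ``url_scheme``, while a host that already carries a scheme is left untouched. A standalone sketch of just that step; the named groups ``scheme`` and ``host`` are reconstructed here from the ``groupdict()`` lookup in the original code:

    import re

    def normalize_host(host: str, url_scheme: str = "https") -> str:
        # Capture an optional "http://" or "https://" prefix as "scheme".
        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
        if maybe_url_match is None:
            raise ValueError(f"Unexpected hostname structure: {host}")
        url_match_items = maybe_url_match.groupdict()
        # Prepend the scheme only when the caller did not supply one.
        return f"{url_scheme}://{host}" if not url_match_items["scheme"] else host

    assert normalize_host("logging.googleapis.com") == "https://logging.googleapis.com"
    assert normalize_host("http://localhost:8080") == "http://localhost:8080"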
-        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
-        # credentials object
-        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
-        if maybe_url_match is None:
-            raise ValueError(f"Unexpected hostname structure: {host}")  # pragma: NO COVER
-
-        url_match_items = maybe_url_match.groupdict()
-
-        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
-
-        super().__init__(
-            host=host,
-            credentials=credentials,
-            client_info=client_info,
-            always_use_jwt_access=always_use_jwt_access,
-            api_audience=api_audience
-        )
-        self._session = AuthorizedSession(
-            self._credentials, default_host=self.DEFAULT_HOST)
-        if client_cert_source_for_mtls:
-            self._session.configure_mtls_channel(client_cert_source_for_mtls)
-        self._interceptor = interceptor or MetricsServiceV2RestInterceptor()
-        self._prep_wrapped_messages(client_info)
-
-    class _CreateLogMetric(MetricsServiceV2RestStub):
-        def __hash__(self):
-            return hash("CreateLogMetric")
-
-        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
-        }
-
-        @classmethod
-        def _get_unset_required_fields(cls, message_dict):
-            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}
-
-        def __call__(self,
-                request: logging_metrics.CreateLogMetricRequest, *,
-                retry: OptionalRetry=gapic_v1.method.DEFAULT,
-                timeout: Optional[float]=None,
-                metadata: Sequence[Tuple[str, str]]=(),
-                ) -> logging_metrics.LogMetric:
-            r"""Call the create log metric method over HTTP.
-
-            Args:
-                request (~.logging_metrics.CreateLogMetricRequest):
-                    The request object. The parameters to CreateLogMetric.
-                retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                    should be retried.
-                timeout (float): The timeout for this request.
-                metadata (Sequence[Tuple[str, str]]): Strings which should be
-                    sent along with the request as metadata.
-
-            Returns:
-                ~.logging_metrics.LogMetric:
-                    Describes a logs-based metric. The
-                    value of the metric is the number of log
-                    entries that match a logs filter in a
-                    given time interval.
-                    Logs-based metrics can also be used to
-                    extract values from logs and create a
-                    distribution of the values. The
-                    distribution records the statistics of
-                    the extracted values along with an
-                    optional histogram of the values as
-                    specified by the bucket options.
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v2/{parent=projects/*}/metrics', - 'body': 'metric', - }, - ] - request, metadata = self._interceptor.pre_create_log_metric(request, metadata) - pb_request = logging_metrics.CreateLogMetricRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=False - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=False, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = logging_metrics.LogMetric() - pb_resp = logging_metrics.LogMetric.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_create_log_metric(resp) - return resp - - class _DeleteLogMetric(MetricsServiceV2RestStub): - def __hash__(self): - return hash("DeleteLogMetric") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: logging_metrics.DeleteLogMetricRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ): - r"""Call the delete log metric method over HTTP. - - Args: - request (~.logging_metrics.DeleteLogMetricRequest): - The request object. The parameters to DeleteLogMetric. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v2/{metric_name=projects/*/metrics/*}', - }, - ] - request, metadata = self._interceptor.pre_delete_log_metric(request, metadata) - pb_request = logging_metrics.DeleteLogMetricRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=False, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - class _GetLogMetric(MetricsServiceV2RestStub): - def __hash__(self): - return hash("GetLogMetric") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: logging_metrics.GetLogMetricRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> logging_metrics.LogMetric: - r"""Call the get log metric method over HTTP. - - Args: - request (~.logging_metrics.GetLogMetricRequest): - The request object. The parameters to GetLogMetric. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.logging_metrics.LogMetric: - Describes a logs-based metric. The - value of the metric is the number of log - entries that match a logs filter in a - given time interval. - Logs-based metrics can also be used to - extract values from logs and create a - distribution of the values. The - distribution records the statistics of - the extracted values along with an - optional histogram of the values as - specified by the bucket options. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v2/{metric_name=projects/*/metrics/*}', - }, - ] - request, metadata = self._interceptor.pre_get_log_metric(request, metadata) - pb_request = logging_metrics.GetLogMetricRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=False, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = logging_metrics.LogMetric() - pb_resp = logging_metrics.LogMetric.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_log_metric(resp) - return resp - - class _ListLogMetrics(MetricsServiceV2RestStub): - def __hash__(self): - return hash("ListLogMetrics") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: logging_metrics.ListLogMetricsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> logging_metrics.ListLogMetricsResponse: - r"""Call the list log metrics method over HTTP. - - Args: - request (~.logging_metrics.ListLogMetricsRequest): - The request object. The parameters to ListLogMetrics. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.logging_metrics.ListLogMetricsResponse: - Result returned from ListLogMetrics. 
- """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v2/{parent=projects/*}/metrics', - }, - ] - request, metadata = self._interceptor.pre_list_log_metrics(request, metadata) - pb_request = logging_metrics.ListLogMetricsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=False, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = logging_metrics.ListLogMetricsResponse() - pb_resp = logging_metrics.ListLogMetricsResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_log_metrics(resp) - return resp - - class _UpdateLogMetric(MetricsServiceV2RestStub): - def __hash__(self): - return hash("UpdateLogMetric") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: logging_metrics.UpdateLogMetricRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> logging_metrics.LogMetric: - r"""Call the update log metric method over HTTP. - - Args: - request (~.logging_metrics.UpdateLogMetricRequest): - The request object. The parameters to UpdateLogMetric. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.logging_metrics.LogMetric: - Describes a logs-based metric. The - value of the metric is the number of log - entries that match a logs filter in a - given time interval. - Logs-based metrics can also be used to - extract values from logs and create a - distribution of the values. The - distribution records the statistics of - the extracted values along with an - optional histogram of the values as - specified by the bucket options. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'put', - 'uri': '/v2/{metric_name=projects/*/metrics/*}', - 'body': 'metric', - }, - ] - request, metadata = self._interceptor.pre_update_log_metric(request, metadata) - pb_request = logging_metrics.UpdateLogMetricRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=False - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=False, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = logging_metrics.LogMetric() - pb_resp = logging_metrics.LogMetric.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_update_log_metric(resp) - return resp - - @property - def create_log_metric(self) -> Callable[ - [logging_metrics.CreateLogMetricRequest], - logging_metrics.LogMetric]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CreateLogMetric(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_log_metric(self) -> Callable[ - [logging_metrics.DeleteLogMetricRequest], - empty_pb2.Empty]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._DeleteLogMetric(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_log_metric(self) -> Callable[ - [logging_metrics.GetLogMetricRequest], - logging_metrics.LogMetric]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetLogMetric(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_log_metrics(self) -> Callable[ - [logging_metrics.ListLogMetricsRequest], - logging_metrics.ListLogMetricsResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListLogMetrics(self._session, self._host, self._interceptor) # type: ignore - - @property - def update_log_metric(self) -> Callable[ - [logging_metrics.UpdateLogMetricRequest], - logging_metrics.LogMetric]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._UpdateLogMetric(self._session, self._host, self._interceptor) # type: ignore - - @property - def kind(self) -> str: - return "rest" - - def close(self): - self._session.close() - - -__all__=( - 'MetricsServiceV2RestTransport', -) diff --git a/tests/integration/goldens/logging/google/cloud/logging_v2/types/__init__.py b/tests/integration/goldens/logging/google/cloud/logging_v2/types/__init__.py index 29636d30eb..64298ee56e 100755 --- a/tests/integration/goldens/logging/google/cloud/logging_v2/types/__init__.py +++ b/tests/integration/goldens/logging/google/cloud/logging_v2/types/__init__.py @@ -17,6 +17,7 @@ LogEntry, LogEntryOperation, LogEntrySourceLocation, + LogSplit, ) from .logging import ( DeleteLogRequest, @@ -33,40 +34,59 @@ WriteLogEntriesResponse, ) from .logging_config import ( + BigQueryDataset, BigQueryOptions, + BucketMetadata, CmekSettings, + CopyLogEntriesMetadata, + CopyLogEntriesRequest, + CopyLogEntriesResponse, CreateBucketRequest, CreateExclusionRequest, + CreateLinkRequest, CreateSinkRequest, CreateViewRequest, DeleteBucketRequest, DeleteExclusionRequest, + DeleteLinkRequest, DeleteSinkRequest, DeleteViewRequest, GetBucketRequest, GetCmekSettingsRequest, GetExclusionRequest, + GetLinkRequest, + GetSettingsRequest, GetSinkRequest, GetViewRequest, + IndexConfig, + Link, + LinkMetadata, ListBucketsRequest, ListBucketsResponse, ListExclusionsRequest, ListExclusionsResponse, + ListLinksRequest, + ListLinksResponse, ListSinksRequest, ListSinksResponse, ListViewsRequest, ListViewsResponse, + LocationMetadata, LogBucket, LogExclusion, LogSink, LogView, + Settings, UndeleteBucketRequest, UpdateBucketRequest, UpdateCmekSettingsRequest, UpdateExclusionRequest, + UpdateSettingsRequest, UpdateSinkRequest, UpdateViewRequest, + IndexType, LifecycleState, + OperationState, ) from .logging_metrics import ( CreateLogMetricRequest, @@ -82,6 +102,7 @@ 'LogEntry', 'LogEntryOperation', 'LogEntrySourceLocation', + 'LogSplit', 'DeleteLogRequest', 'ListLogEntriesRequest', 'ListLogEntriesResponse', @@ -94,40 +115,59 @@ 'WriteLogEntriesPartialErrors', 'WriteLogEntriesRequest', 'WriteLogEntriesResponse', + 'BigQueryDataset', 'BigQueryOptions', + 'BucketMetadata', 'CmekSettings', + 'CopyLogEntriesMetadata', + 'CopyLogEntriesRequest', + 'CopyLogEntriesResponse', 'CreateBucketRequest', 'CreateExclusionRequest', + 'CreateLinkRequest', 'CreateSinkRequest', 'CreateViewRequest', 'DeleteBucketRequest', 'DeleteExclusionRequest', + 'DeleteLinkRequest', 'DeleteSinkRequest', 'DeleteViewRequest', 'GetBucketRequest', 'GetCmekSettingsRequest', 'GetExclusionRequest', + 'GetLinkRequest', + 'GetSettingsRequest', 'GetSinkRequest', 'GetViewRequest', + 'IndexConfig', + 'Link', + 'LinkMetadata', 'ListBucketsRequest', 'ListBucketsResponse', 'ListExclusionsRequest', 'ListExclusionsResponse', + 'ListLinksRequest', + 'ListLinksResponse', 'ListSinksRequest', 'ListSinksResponse', 'ListViewsRequest', 'ListViewsResponse', + 'LocationMetadata', 'LogBucket', 'LogExclusion', 'LogSink', 'LogView', + 'Settings', 'UndeleteBucketRequest', 'UpdateBucketRequest', 'UpdateCmekSettingsRequest', 'UpdateExclusionRequest', + 'UpdateSettingsRequest', 'UpdateSinkRequest', 'UpdateViewRequest', + 'IndexType', 'LifecycleState', + 'OperationState', 'CreateLogMetricRequest', 'DeleteLogMetricRequest', 'GetLogMetricRequest', diff --git a/tests/integration/goldens/logging/google/cloud/logging_v2/types/log_entry.py 
b/tests/integration/goldens/logging/google/cloud/logging_v2/types/log_entry.py index 040a12549b..f6063ad00a 100755 --- a/tests/integration/goldens/logging/google/cloud/logging_v2/types/log_entry.py +++ b/tests/integration/goldens/logging/google/cloud/logging_v2/types/log_entry.py @@ -33,6 +33,7 @@ 'LogEntry', 'LogEntryOperation', 'LogEntrySourceLocation', + 'LogSplit', }, ) @@ -67,6 +68,7 @@ class LogEntry(proto.Message): ``[LOG_ID]`` must be URL-encoded within ``log_name``. Example: ``"organizations/1234567890/logs/cloudresourcemanager.googleapis.com%2Factivity"``. + ``[LOG_ID]`` must be less than 512 characters long and can only include the following characters: upper and lower case alphanumeric characters, forward-slash, underscore, hyphen, @@ -74,7 +76,7 @@ class LogEntry(proto.Message): For backward compatibility, if ``log_name`` begins with a forward-slash, such as ``/projects/...``, then the log entry - is ingested as usual but the forward-slash is removed. + is ingested as usual, but the forward-slash is removed. Listing the log entry will not show the leading slash and filtering for a log name with a leading slash will never return any results. @@ -90,6 +92,7 @@ class LogEntry(proto.Message): protocol buffer. Some Google Cloud Platform services use this field for their log entry payloads. + The following protocol buffer types are supported; user-defined types are not supported: @@ -147,25 +150,74 @@ class LogEntry(proto.Message): Optional. Information about the HTTP request associated with this log entry, if applicable. labels (MutableMapping[str, str]): - Optional. A set of user-defined (key, value) - data that provides additional information about - the log entry. + Optional. A map of key, value pairs that provides additional + information about the log entry. The labels can be + user-defined or system-defined. + + User-defined labels are arbitrary key, value pairs that you + can use to classify logs. + + System-defined labels are defined by GCP services for + platform logs. They have two components - a service + namespace component and the attribute name. For example: + ``compute.googleapis.com/resource_name``. + + Cloud Logging truncates label keys that exceed 512 B and + label values that exceed 64 KB upon their associated log + entry being written. The truncation is indicated by an + ellipsis at the end of the character string. operation (google.cloud.logging_v2.types.LogEntryOperation): Optional. Information about an operation associated with the log entry, if applicable. trace (str): - Optional. Resource name of the trace associated with the log - entry, if any. If it contains a relative resource name, the - name is assumed to be relative to - ``//tracing.googleapis.com``. Example: - ``projects/my-projectid/traces/06796866738c859f2f19b7cfb3214824`` + Optional. The REST resource name of the trace being written + to `Cloud Trace `__ in + association with this log entry. For example, if your trace + data is stored in the Cloud project "my-trace-project" and + if the service that is creating the log entry receives a + trace header that includes the trace ID "12345", then the + service should use + "projects/my-tracing-project/traces/12345". + + The ``trace`` field provides the link between logs and + traces. By using this field, you can navigate from a log + entry to a trace. span_id (str): - Optional. The span ID within the trace associated with the - log entry. 
- - For Trace spans, this is the same format that the Trace API - v2 uses: a 16-character hexadecimal encoding of an 8-byte - array, such as ``000000000000004a``. + Optional. The ID of the `Cloud + Trace `__ span associated + with the current operation in which the log is being + written. For example, if a span has the REST resource name + of + "projects/some-project/traces/some-trace/spans/some-span-id", + then the ``span_id`` field is "some-span-id". + + A + `Span `__ + represents a single operation within a trace. Whereas a + trace may involve multiple different microservices running + on multiple different machines, a span generally corresponds + to a single logical operation being performed in a single + instance of a microservice on one specific machine. Spans + are the nodes within the tree that is a trace. + + Applications that are `instrumented for + tracing `__ will + generally assign a new, unique span ID on each incoming + request. It is also common to create and record additional + spans corresponding to internal processing elements as well + as issuing requests to dependencies. + + The span ID is expected to be a 16-character, hexadecimal + encoding of an 8-byte array and should not be zero. It + should be unique within the trace and should, ideally, be + generated in a manner that is uniformly random. + + Example values: + + - ``000000000000004a`` + - ``7a2190356c3fc94b`` + - ``0000f00300090021`` + - ``d39223e101960076`` trace_sampled (bool): Optional. The sampling decision of the trace associated with the log entry. @@ -179,6 +231,10 @@ class LogEntry(proto.Message): source_location (google.cloud.logging_v2.types.LogEntrySourceLocation): Optional. Source code location information associated with the log entry, if any. + split (google.cloud.logging_v2.types.LogSplit): + Optional. Information indicating this + LogEntry is part of a sequence of multiple log + entries split from a single LogEntry. """ log_name: str = proto.Field( @@ -258,6 +314,11 @@ class LogEntry(proto.Message): number=23, message='LogEntrySourceLocation', ) + split: 'LogSplit' = proto.Field( + proto.MESSAGE, + number=35, + message='LogSplit', + ) class LogEntryOperation(proto.Message): @@ -336,4 +397,39 @@ class LogEntrySourceLocation(proto.Message): ) +class LogSplit(proto.Message): + r"""Additional information used to correlate multiple log + entries. Used when a single LogEntry would exceed the Google + Cloud Logging size limit and is split across multiple log + entries. + + Attributes: + uid (str): + A globally unique identifier for all log entries in a + sequence of split log entries. All log entries with the same + \|LogSplit.uid\| are assumed to be part of the same sequence + of split log entries. + index (int): + The index of this LogEntry in the sequence of split log + entries. Log entries are given \|index\| values 0, 1, ..., + n-1 for a sequence of n log entries. + total_splits (int): + The total number of log entries that the + original LogEntry was split into. 
+ """ + + uid: str = proto.Field( + proto.STRING, + number=1, + ) + index: int = proto.Field( + proto.INT32, + number=2, + ) + total_splits: int = proto.Field( + proto.INT32, + number=3, + ) + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging.py b/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging.py index fffc74c192..715909e87a 100755 --- a/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging.py +++ b/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging.py @@ -51,16 +51,15 @@ class DeleteLogRequest(proto.Message): log_name (str): Required. The resource name of the log to delete: - :: - - "projects/[PROJECT_ID]/logs/[LOG_ID]" - "organizations/[ORGANIZATION_ID]/logs/[LOG_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]" - "folders/[FOLDER_ID]/logs/[LOG_ID]" + - ``projects/[PROJECT_ID]/logs/[LOG_ID]`` + - ``organizations/[ORGANIZATION_ID]/logs/[LOG_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]`` + - ``folders/[FOLDER_ID]/logs/[LOG_ID]`` ``[LOG_ID]`` must be URL-encoded. For example, ``"projects/my-project-id/logs/syslog"``, - ``"organizations/1234567890/logs/cloudresourcemanager.googleapis.com%2Factivity"``. + ``"organizations/123/logs/cloudaudit.googleapis.com%2Factivity"``. + For more information about log names, see [LogEntry][google.logging.v2.LogEntry]. """ @@ -80,19 +79,17 @@ class WriteLogEntriesRequest(proto.Message): all log entries in ``entries`` that do not specify a value for ``log_name``: - :: - - "projects/[PROJECT_ID]/logs/[LOG_ID]" - "organizations/[ORGANIZATION_ID]/logs/[LOG_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]" - "folders/[FOLDER_ID]/logs/[LOG_ID]" + - ``projects/[PROJECT_ID]/logs/[LOG_ID]`` + - ``organizations/[ORGANIZATION_ID]/logs/[LOG_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]`` + - ``folders/[FOLDER_ID]/logs/[LOG_ID]`` ``[LOG_ID]`` must be URL-encoded. For example: :: "projects/my-project-id/logs/syslog" - "organizations/1234567890/logs/cloudresourcemanager.googleapis.com%2Factivity" + "organizations/123/logs/cloudaudit.googleapis.com%2Factivity" The permission ``logging.logEntries.create`` is needed on each project, organization, billing account, or folder that @@ -135,25 +132,27 @@ class WriteLogEntriesRequest(proto.Message): Log entries with timestamps that are more than the `logs retention - period `__ in - the past or more than 24 hours in the future will not be + period `__ in the + past or more than 24 hours in the future will not be available when calling ``entries.list``. However, those log entries can still be `exported with LogSinks `__. To improve throughput and to avoid exceeding the `quota - limit `__ for - calls to ``entries.write``, you should try to include - several log entries in this list, rather than calling this - method for each individual log entry. + limit `__ for calls + to ``entries.write``, you should try to include several log + entries in this list, rather than calling this method for + each individual log entry. partial_success (bool): - Optional. Whether valid entries should be written even if - some other entries fail due to INVALID_ARGUMENT or - PERMISSION_DENIED errors. If any entry is not written, then - the response status is the error associated with one of the - failed entries and the response includes error details keyed - by the entries' zero-based index in the ``entries.write`` - method. + Optional. 
Whether a batch's valid entries should be written + even if some other entry failed due to a permanent error + such as INVALID_ARGUMENT or PERMISSION_DENIED. If any entry + failed, then the response status is the response status of + one of the failed entries. The response will include error + details in ``WriteLogEntriesPartialErrors.log_entry_errors`` + keyed by the entries' zero-based index in the ``entries``. + Failed requests for which no entries are written will not + include per-entry errors. dry_run (bool): Optional. If true, the request should expect normal response, but the entries won't be @@ -226,31 +225,28 @@ class ListLogEntriesRequest(proto.Message): Required. Names of one or more parent resources from which to retrieve log entries: - :: + - ``projects/[PROJECT_ID]`` + - ``organizations/[ORGANIZATION_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]`` + - ``folders/[FOLDER_ID]`` - "projects/[PROJECT_ID]" - "organizations/[ORGANIZATION_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]" - "folders/[FOLDER_ID]" + May alternatively be one or more views: - May alternatively be one or more views - projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] - organization/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] - billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] - folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] + - ``projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` Projects listed in the ``project_ids`` field are added to - this list. + this list. A maximum of 100 resources may be specified in a + single request. filter (str): - Optional. A filter that chooses which log entries to return. - See `Advanced Logs - Queries `__. - Only log entries that match the filter are returned. An - empty filter matches all log entries in the resources listed - in ``resource_names``. Referencing a parent resource that is - not listed in ``resource_names`` will cause the filter to - return no results. The maximum length of the filter is 20000 - characters. + Optional. Only log entries that match the filter are + returned. An empty filter matches all log entries in the + resources listed in ``resource_names``. Referencing a parent + resource that is not listed in ``resource_names`` will cause + the filter to return no results. The maximum length of a + filter is 20,000 characters. order_by (str): Optional. How the results should be sorted. Presently, the only permitted values are ``"timestamp asc"`` (default) and @@ -396,14 +392,29 @@ class ListLogsRequest(proto.Message): Attributes: parent (str): - Required. The resource name that owns the logs: + Required. The resource name to list logs for: - :: + - ``projects/[PROJECT_ID]`` + - ``organizations/[ORGANIZATION_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]`` + - ``folders/[FOLDER_ID]`` + resource_names (MutableSequence[str]): + Optional. 
List of resource names to list logs for: + + - ``projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + + To support legacy queries, it could also be: + + - ``projects/[PROJECT_ID]`` + - ``organizations/[ORGANIZATION_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]`` + - ``folders/[FOLDER_ID]`` - "projects/[PROJECT_ID]" - "organizations/[ORGANIZATION_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]" - "folders/[FOLDER_ID]". + The resource name in the ``parent`` field is added to this + list. page_size (int): Optional. The maximum number of results to return from this request. Non-positive values are ignored. The presence of @@ -415,22 +426,16 @@ class ListLogsRequest(proto.Message): ``pageToken`` must be the value of ``nextPageToken`` from the previous response. The values of other method parameters should be identical to those in the previous call. - resource_names (MutableSequence[str]): - Optional. The resource name that owns the logs: - projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] - organization/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] - billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] - folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] - - To support legacy queries, it could also be: - "projects/[PROJECT_ID]" "organizations/[ORGANIZATION_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]" "folders/[FOLDER_ID]". """ parent: str = proto.Field( proto.STRING, number=1, ) + resource_names: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=8, + ) page_size: int = proto.Field( proto.INT32, number=2, @@ -439,10 +444,6 @@ class ListLogsRequest(proto.Message): proto.STRING, number=3, ) - resource_names: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=8, - ) class ListLogsResponse(proto.Message): @@ -482,28 +483,24 @@ class TailLogEntriesRequest(proto.Message): Required. Name of a parent resource from which to retrieve log entries: - :: - - "projects/[PROJECT_ID]" - "organizations/[ORGANIZATION_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]" - "folders/[FOLDER_ID]" + - ``projects/[PROJECT_ID]`` + - ``organizations/[ORGANIZATION_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]`` + - ``folders/[FOLDER_ID]`` May alternatively be one or more views: - "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]" - "organization/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]" - "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]". + + - ``projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` filter (str): - Optional. A filter that chooses which log entries to return. - See `Advanced Logs - Filters `__. 
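The reworked ``ListLogsRequest`` above accepts both view resource names and the legacy parent forms in ``resource_names``. A minimal request sketch, assuming the generated types module and hypothetical project, bucket, and view IDs::

    from google.cloud.logging_v2.types import logging

    request = logging.ListLogsRequest(
        parent="projects/my-project",
        # View names and the legacy parent forms are both accepted here;
        # the parent above is added to this list automatically.
        resource_names=[
            "projects/my-project/locations/global/buckets/my-bucket/views/my-view",
        ],
        page_size=100,
    )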
- Only log entries that match the filter are returned. An - empty filter matches all log entries in the resources listed - in ``resource_names``. Referencing a parent resource that is - not in ``resource_names`` will cause the filter to return no - results. The maximum length of the filter is 20000 - characters. + Optional. Only log entries that match the filter are + returned. An empty filter matches all log entries in the + resources listed in ``resource_names``. Referencing a parent + resource that is not listed in ``resource_names`` will cause + the filter to return no results. The maximum length of a + filter is 20,000 characters. buffer_window (google.protobuf.duration_pb2.Duration): Optional. The amount of time to buffer log entries at the server before being returned to diff --git a/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_config.py b/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_config.py index a7ef42ec82..0df028c0b9 100755 --- a/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_config.py +++ b/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_config.py @@ -26,10 +26,15 @@ __protobuf__ = proto.module( package='google.logging.v2', manifest={ + 'OperationState', 'LifecycleState', + 'IndexType', + 'IndexConfig', 'LogBucket', 'LogView', 'LogSink', + 'BigQueryDataset', + 'Link', 'BigQueryOptions', 'ListBucketsRequest', 'ListBucketsResponse', @@ -50,6 +55,11 @@ 'CreateSinkRequest', 'UpdateSinkRequest', 'DeleteSinkRequest', + 'CreateLinkRequest', + 'DeleteLinkRequest', + 'ListLinksRequest', + 'ListLinksResponse', + 'GetLinkRequest', 'LogExclusion', 'ListExclusionsRequest', 'ListExclusionsResponse', @@ -60,40 +70,158 @@ 'GetCmekSettingsRequest', 'UpdateCmekSettingsRequest', 'CmekSettings', + 'GetSettingsRequest', + 'UpdateSettingsRequest', + 'Settings', + 'CopyLogEntriesRequest', + 'CopyLogEntriesMetadata', + 'CopyLogEntriesResponse', + 'BucketMetadata', + 'LinkMetadata', + 'LocationMetadata', }, ) +class OperationState(proto.Enum): + r"""List of different operation states. + High level state of the operation. This is used to report the + job's current state to the user. Once a long running operation + is created, the current state of the operation can be queried + even before the operation is finished and the final result is + available. + + Values: + OPERATION_STATE_UNSPECIFIED (0): + Should not be used. + OPERATION_STATE_SCHEDULED (1): + The operation is scheduled. + OPERATION_STATE_WAITING_FOR_PERMISSIONS (2): + Waiting for necessary permissions. + OPERATION_STATE_RUNNING (3): + The operation is running. + OPERATION_STATE_SUCCEEDED (4): + The operation was completed successfully. + OPERATION_STATE_FAILED (5): + The operation failed. + OPERATION_STATE_CANCELLED (6): + The operation was cancelled by the user. + """ + OPERATION_STATE_UNSPECIFIED = 0 + OPERATION_STATE_SCHEDULED = 1 + OPERATION_STATE_WAITING_FOR_PERMISSIONS = 2 + OPERATION_STATE_RUNNING = 3 + OPERATION_STATE_SUCCEEDED = 4 + OPERATION_STATE_FAILED = 5 + OPERATION_STATE_CANCELLED = 6 + + class LifecycleState(proto.Enum): r"""LogBucket lifecycle states. Values: LIFECYCLE_STATE_UNSPECIFIED (0): - Unspecified state. This is only used/useful + Unspecified state. This is only used/useful for distinguishing unset values. ACTIVE (1): The normal and active state. DELETE_REQUESTED (2): - The bucket has been marked for deletion by - the user. + The resource has been marked for deletion by + the user. For some resources (e.g. 
buckets), + this can be reversed by an un-delete operation. + UPDATING (3): + The resource has been marked for an update by + the user. It will remain in this state until the + update is complete. + CREATING (4): + The resource has been marked for creation by + the user. It will remain in this state until the + creation is complete. + FAILED (5): + The resource is in an INTERNAL error state. """ LIFECYCLE_STATE_UNSPECIFIED = 0 ACTIVE = 1 DELETE_REQUESTED = 2 + UPDATING = 3 + CREATING = 4 + FAILED = 5 + + +class IndexType(proto.Enum): + r"""IndexType is used for custom indexing. It describes the type + of an indexed field. + + Values: + INDEX_TYPE_UNSPECIFIED (0): + The index's type is unspecified. + INDEX_TYPE_STRING (1): + The index is a string-type index. + INDEX_TYPE_INTEGER (2): + The index is a integer-type index. + """ + INDEX_TYPE_UNSPECIFIED = 0 + INDEX_TYPE_STRING = 1 + INDEX_TYPE_INTEGER = 2 + + +class IndexConfig(proto.Message): + r"""Configuration for an indexed field. + + Attributes: + field_path (str): + Required. The LogEntry field path to index. + + Note that some paths are automatically indexed, and other + paths are not eligible for indexing. See `indexing + documentation `__ + for details. + + For example: ``jsonPayload.request.status`` + type_ (google.cloud.logging_v2.types.IndexType): + Required. The type of data in this index. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The timestamp when the index was + last modified. + This is used to return the timestamp, and will + be ignored if supplied during update. + """ + + field_path: str = proto.Field( + proto.STRING, + number=1, + ) + type_: 'IndexType' = proto.Field( + proto.ENUM, + number=2, + enum='IndexType', + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) class LogBucket(proto.Message): - r"""Describes a repository of logs. + r"""Describes a repository in which log entries are stored. Attributes: name (str): - The resource name of the bucket. For example: - "projects/my-project-id/locations/my-location/buckets/my-bucket-id - The supported locations are: "global" + Output only. The resource name of the bucket. + + For example: - For the location of ``global`` it is unspecified where logs - are actually stored. Once a bucket has been created, the - location can not be changed. + ``projects/my-project/locations/global/buckets/my-bucket`` + + For a list of supported locations, see `Supported + Regions `__ + + For the location of ``global`` it is unspecified where log + entries are actually stored. + + After a bucket has been created, the location cannot be + changed. description (str): Describes this bucket. create_time (google.protobuf.timestamp_pb2.Timestamp): @@ -111,12 +239,39 @@ class LogBucket(proto.Message): bucket creation time, the default time of 30 days will be used. locked (bool): - Whether the bucket has been locked. - The retention period on a locked bucket may not + Whether the bucket is locked. + + The retention period on a locked bucket cannot be changed. Locked buckets may only be deleted if they are empty. lifecycle_state (google.cloud.logging_v2.types.LifecycleState): Output only. The bucket lifecycle state. + analytics_enabled (bool): + Whether log analytics is enabled for this + bucket. + Once enabled, log analytics features cannot be + disabled. + restricted_fields (MutableSequence[str]): + Log entry field paths that are denied access in this bucket. 
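The new ``IndexConfig`` message above pairs a ``LogEntry`` field path with an ``IndexType``. A minimal sketch of building one, assuming the generated module; the field path is the example from the docstring::

    from google.cloud.logging_v2.types import logging_config

    index = logging_config.IndexConfig(
        field_path="jsonPayload.request.status",
        # The generated attribute is type_ with a trailing underscore
        # because "type" shadows the Python builtin.
        type_=logging_config.IndexType.INDEX_TYPE_INTEGER,
    )

``create_time`` is output only, so it is left unset when creating the index.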
+ + The following fields and their children are eligible: + ``textPayload``, ``jsonPayload``, ``protoPayload``, + ``httpRequest``, ``labels``, ``sourceLocation``. + + Restricting a repeated field will restrict all values. + Adding a parent will block all child fields. (e.g. + ``foo.bar`` will block ``foo.bar.baz``) + index_configs (MutableSequence[google.cloud.logging_v2.types.IndexConfig]): + A list of indexed fields and related + configuration data. + cmek_settings (google.cloud.logging_v2.types.CmekSettings): + The CMEK settings of the log bucket. If + present, new log entries written to this log + bucket are encrypted using the CMEK key provided + in this configuration. If a log bucket has CMEK + settings, the CMEK settings cannot be disabled + later by updating the log bucket. Changing the + KMS key is allowed. """ name: str = proto.Field( @@ -150,16 +305,36 @@ class LogBucket(proto.Message): number=12, enum='LifecycleState', ) + analytics_enabled: bool = proto.Field( + proto.BOOL, + number=14, + ) + restricted_fields: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=15, + ) + index_configs: MutableSequence['IndexConfig'] = proto.RepeatedField( + proto.MESSAGE, + number=17, + message='IndexConfig', + ) + cmek_settings: 'CmekSettings' = proto.Field( + proto.MESSAGE, + number=19, + message='CmekSettings', + ) class LogView(proto.Message): - r"""Describes a view over logs in a bucket. + r"""Describes a view over log entries in a bucket. Attributes: name (str): The resource name of the view. - For example - "projects/my-project-id/locations/my-location/buckets/my-bucket-id/views/my-view + + For example: + + ``projects/my-project/locations/global/buckets/my-bucket/views/my-view`` description (str): Describes this view. create_time (google.protobuf.timestamp_pb2.Timestamp): @@ -170,11 +345,19 @@ class LogView(proto.Message): view. filter (str): Filter that restricts which log entries in a bucket are - visible in this view. Filters are restricted to be a logical - AND of ==/!= of any of the following: originating - project/folder/organization/billing account. resource type - log id Example: SOURCE("projects/myproject") AND - resource.type = "gce_instance" AND LOG_ID("stdout") + visible in this view. + + Filters are restricted to be a logical AND of ==/!= of any + of the following: + + - originating project/folder/organization/billing account. + - resource type + - log id + + For example: + + SOURCE("projects/myproject") AND resource.type = + "gce_instance" AND LOG_ID("stdout") """ name: str = proto.Field( @@ -204,10 +387,10 @@ class LogView(proto.Message): class LogSink(proto.Message): r"""Describes a sink used to export log entries to one of the following destinations in any project: a Cloud Storage bucket, a - BigQuery dataset, or a Cloud Pub/Sub topic. A logs filter - controls which log entries are exported. The sink must be - created within a project, organization, billing account, or - folder. + BigQuery dataset, a Pub/Sub topic or a Cloud Logging log bucket. + A logs filter controls which log entries are exported. The sink + must be created within a project, organization, billing account, + or folder. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -215,7 +398,9 @@ class LogSink(proto.Message): Attributes: name (str): Required. The client-assigned sink identifier, unique within - the project. Example: ``"my-syslog-errors-to-pubsub"``. Sink + the project. + + For example: ``"my-syslog-errors-to-pubsub"``. 
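The ``LogView`` filter grammar described above is restricted to AND-ed ``SOURCE(...)``, resource type, and ``LOG_ID(...)`` clauses. A minimal sketch using the docstring's own example filter, with a hypothetical view name::

    from google.cloud.logging_v2.types import logging_config

    view = logging_config.LogView(
        name="projects/my-project/locations/global/buckets/my-bucket/views/my-view",
        description="stdout from GCE instances in myproject",
        filter=(
            'SOURCE("projects/myproject") AND '
            'resource.type = "gce_instance" AND LOG_ID("stdout")'
        ),
    )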
Sink identifiers are limited to 100 characters and can include only the following characters: upper and lower-case alphanumeric characters, underscores, hyphens, and periods. @@ -238,30 +423,33 @@ class LogSink(proto.Message): Optional. An `advanced logs filter `__. The only exported log entries are those that are in the - resource owning the sink and that match the filter. For - example: + resource owning the sink and that match the filter. - :: + For example: - logName="projects/[PROJECT_ID]/logs/[LOG_ID]" AND severity>=ERROR + ``logName="projects/[PROJECT_ID]/logs/[LOG_ID]" AND severity>=ERROR`` description (str): Optional. A description of this sink. + The maximum length of the description is 8000 characters. disabled (bool): - Optional. If set to True, then this sink is + Optional. If set to true, then this sink is disabled and it does not export any log entries. exclusions (MutableSequence[google.cloud.logging_v2.types.LogExclusion]): - Optional. Log entries that match any of the exclusion - filters will not be exported. If a log entry is matched by - both ``filter`` and one of ``exclusion_filters`` it will not - be exported. + Optional. Log entries that match any of these exclusion + filters will not be exported. + + If a log entry is matched by both ``filter`` and one of + ``exclusion_filters`` it will not be exported. output_version_format (google.cloud.logging_v2.types.LogSink.VersionFormat): Deprecated. This field is unused. writer_identity (str): Output only. An IAM identity—a service account or - group—under which Logging writes the exported log entries to - the sink's destination. This field is set by + group—under which Cloud Logging writes the exported log + entries to the sink's destination. This field is either set + by specifying ``custom_writer_identity`` or set + automatically by [sinks.create][google.logging.v2.ConfigServiceV2.CreateSink] and [sinks.update][google.logging.v2.ConfigServiceV2.UpdateSink] @@ -274,25 +462,30 @@ class LogSink(proto.Message): Resource `__. Consult the destination service's documentation to determine the appropriate IAM roles to assign to the identity. + + Sinks that have a destination that is a log bucket in the + same project as the sink cannot have a writer_identity and + no additional permissions are required. include_children (bool): Optional. This field applies only to sinks owned by organizations and folders. If the field is false, the default, only the logs owned by the sink's parent resource - are available for export. If the field is true, then logs - from all the projects, folders, and billing accounts + are available for export. If the field is true, then log + entries from all the projects, folders, and billing accounts contained in the sink's parent resource are also available for export. Whether a particular log entry from the children - is exported depends on the sink's filter expression. For - example, if this field is true, then the filter + is exported depends on the sink's filter expression. + + For example, if this field is true, then the filter ``resource.type=gce_instance`` would export all Compute Engine VM instance log entries from all projects in the - sink's parent. To only export entries from certain child - projects, filter on the project part of the log name: + sink's parent. 
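A sink combining the ``filter`` and ``exclusions`` semantics described above might be built as follows. This is a sketch only; the sink name and exclusion are hypothetical, and the log-bucket destination string is an assumption about the destination format rather than something stated in this diff::

    from google.cloud.logging_v2.types import logging_config

    sink = logging_config.LogSink(
        name="my-errors-sink",
        destination=(
            "logging.googleapis.com/projects/my-project/"
            "locations/global/buckets/my-bucket"
        ),
        filter="severity>=ERROR",
        # Entries matched by filter but also by any exclusion are dropped.
        exclusions=[
            logging_config.LogExclusion(
                name="drop-healthchecks",
                filter='httpRequest.requestUrl:"/healthz"',
            ),
        ],
    )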
- :: + To only export entries from certain child projects, filter + on the project part of the log name: - logName:("projects/test-project1/" OR "projects/test-project2/") AND - resource.type=gce_instance + logName:("projects/test-project1/" OR + "projects/test-project2/") AND resource.type=gce_instance bigquery_options (google.cloud.logging_v2.types.BigQueryOptions): Optional. Options that affect sinks exporting data to BigQuery. @@ -379,6 +572,90 @@ class VersionFormat(proto.Enum): ) +class BigQueryDataset(proto.Message): + r"""Describes a BigQuery dataset that was created by a link. + + Attributes: + dataset_id (str): + Output only. The full resource name of the BigQuery dataset. + The DATASET_ID will match the ID of the link, so the link + must match the naming restrictions of BigQuery datasets + (alphanumeric characters and underscores only). + + The dataset will have a resource path of + "bigquery.googleapis.com/projects/[PROJECT_ID]/datasets/[DATASET_ID]". + """ + + dataset_id: str = proto.Field( + proto.STRING, + number=1, + ) + + +class Link(proto.Message): + r"""Describes a link connected to an analytics enabled bucket. + + Attributes: + name (str): + The resource name of the link. The name can have up to 100 + characters. A valid link id (at the end of the link name) + must only have alphanumeric characters and underscores + within it. + + :: + + "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]" + "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]" + "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]" + + For example: + + \`projects/my-project/locations/global/buckets/my-bucket/links/my_link + description (str): + Describes this link. + + The maximum length of the description is 8000 + characters. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The creation timestamp of the + link. + lifecycle_state (google.cloud.logging_v2.types.LifecycleState): + Output only. The resource lifecycle state. + bigquery_dataset (google.cloud.logging_v2.types.BigQueryDataset): + The information of a BigQuery Dataset. When a + link is created, a BigQuery dataset is created + along with it, in the same project as the + LogBucket it's linked to. This dataset will also + have BigQuery Views corresponding to the + LogViews in the bucket. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + description: str = proto.Field( + proto.STRING, + number=2, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + lifecycle_state: 'LifecycleState' = proto.Field( + proto.ENUM, + number=4, + enum='LifecycleState', + ) + bigquery_dataset: 'BigQueryDataset' = proto.Field( + proto.MESSAGE, + number=5, + message='BigQueryDataset', + ) + + class BigQueryOptions(proto.Message): r"""Options that change functionality of a sink exporting data to BigQuery. @@ -387,18 +664,20 @@ class BigQueryOptions(proto.Message): use_partitioned_tables (bool): Optional. Whether to use `BigQuery's partition tables `__. - By default, Logging creates dated tables based on the log - entries' timestamps, e.g. syslog_20170523. With partitioned - tables the date suffix is no longer present and `special - query + By default, Cloud Logging creates dated tables based on the + log entries' timestamps, e.g. syslog_20170523. 
With + partitioned tables the date suffix is no longer present and + `special query syntax `__ has to be used instead. In both cases, tables are sharded based on UTC timezone. uses_timestamp_column_partitioning (bool): Output only. True if new timestamp column based partitioning is in use, false if legacy ingestion-time partitioning is in - use. All new sinks will have this field set true and will - use timestamp column based partitioning. If + use. + + All new sinks will have this field set true and will use + timestamp column based partitioning. If use_partitioned_tables is false, this value has no meaning and will be false. Legacy sinks using partitioned tables will have this field set to false. @@ -492,13 +771,15 @@ class CreateBucketRequest(proto.Message): Attributes: parent (str): - Required. The resource in which to create the bucket: + Required. The resource in which to create the log bucket: :: "projects/[PROJECT_ID]/locations/[LOCATION_ID]" - Example: ``"projects/my-logging-project/locations/global"`` + For example: + + ``"projects/my-project/locations/global"`` bucket_id (str): Required. A client-assigned identifier such as ``"my-bucket"``. Identifiers are limited to 100 characters @@ -540,11 +821,9 @@ class UpdateBucketRequest(proto.Message): "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" - Example: - ``"projects/my-project-id/locations/my-location/buckets/my-bucket-id"``. - Also requires permission - "resourcemanager.projects.updateLiens" to set the locked - property + For example: + + ``"projects/my-project/locations/global/buckets/my-bucket"`` bucket (google.cloud.logging_v2.types.LogBucket): Required. The updated bucket. update_mask (google.protobuf.field_mask_pb2.FieldMask): @@ -553,10 +832,10 @@ class UpdateBucketRequest(proto.Message): and only if, it is in the update mask. ``name`` and output only fields cannot be updated. - For a detailed ``FieldMask`` definition, see + For a detailed ``FieldMask`` definition, see: https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#google.protobuf.FieldMask - Example: ``updateMask=retention_days``. + For example: ``updateMask=retention_days`` """ name: str = proto.Field( @@ -589,8 +868,9 @@ class GetBucketRequest(proto.Message): "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" - Example: - ``"projects/my-project-id/locations/my-location/buckets/my-bucket-id"``. + For example: + + ``"projects/my-project/locations/global/buckets/my-bucket"`` """ name: str = proto.Field( @@ -613,8 +893,9 @@ class DeleteBucketRequest(proto.Message): "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" - Example: - ``"projects/my-project-id/locations/my-location/buckets/my-bucket-id"``. + For example: + + ``"projects/my-project/locations/global/buckets/my-bucket"`` """ name: str = proto.Field( @@ -637,8 +918,9 @@ class UndeleteBucketRequest(proto.Message): "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" - Example: - ``"projects/my-project-id/locations/my-location/buckets/my-bucket-id"``. 
+ For example: + + ``"projects/my-project/locations/global/buckets/my-bucket"`` """ name: str = proto.Field( @@ -665,7 +947,9 @@ class ListViewsRequest(proto.Message): should be identical to those in the previous call. page_size (int): Optional. The maximum number of results to return from this - request. Non-positive values are ignored. The presence of + request. + + Non-positive values are ignored. The presence of ``nextPageToken`` in the response indicates that more results might be available. """ @@ -721,12 +1005,16 @@ class CreateViewRequest(proto.Message): :: - "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + `"projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]"` + + For example: - Example: - ``"projects/my-logging-project/locations/my-location/buckets/my-bucket"`` + ``"projects/my-project/locations/global/buckets/my-bucket"`` view_id (str): - Required. The id to use for this view. + Required. A client-assigned identifier such as + ``"my-view"``. Identifiers are limited to 100 characters and + can include only letters, digits, underscores, hyphens, and + periods. view (google.cloud.logging_v2.types.LogView): Required. The new view. """ @@ -757,8 +1045,9 @@ class UpdateViewRequest(proto.Message): "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]" - Example: - ``"projects/my-project-id/locations/my-location/buckets/my-bucket-id/views/my-view-id"``. + For example: + + ``"projects/my-project/locations/global/buckets/my-bucket/views/my-view"`` view (google.cloud.logging_v2.types.LogView): Required. The updated view. update_mask (google.protobuf.field_mask_pb2.FieldMask): @@ -770,7 +1059,7 @@ class UpdateViewRequest(proto.Message): For a detailed ``FieldMask`` definition, see https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#google.protobuf.FieldMask - Example: ``updateMask=filter``. + For example: ``updateMask=filter`` """ name: str = proto.Field( @@ -800,8 +1089,9 @@ class GetViewRequest(proto.Message): "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]" - Example: - ``"projects/my-project-id/locations/my-location/buckets/my-bucket-id/views/my-view-id"``. + For example: + + ``"projects/my-project/locations/global/buckets/my-bucket/views/my-view"`` """ name: str = proto.Field( @@ -821,8 +1111,11 @@ class DeleteViewRequest(proto.Message): "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]" - Example: - ``"projects/my-project-id/locations/my-location/buckets/my-bucket-id/views/my-view-id"``. + For example: + + :: + + `"projects/my-project/locations/global/buckets/my-bucket/views/my-view"` """ name: str = proto.Field( @@ -913,7 +1206,9 @@ class GetSinkRequest(proto.Message): "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" "folders/[FOLDER_ID]/sinks/[SINK_ID]" - Example: ``"projects/my-project-id/sinks/my-sink-id"``. + For example: + + ``"projects/my-project/sinks/my-sink"`` """ sink_name: str = proto.Field( @@ -936,8 +1231,9 @@ class CreateSinkRequest(proto.Message): "billingAccounts/[BILLING_ACCOUNT_ID]" "folders/[FOLDER_ID]" - Examples: ``"projects/my-logging-project"``, - ``"organizations/123456789"``. + For examples: + + ``"projects/my-project"`` ``"organizations/123456789"`` sink (google.cloud.logging_v2.types.LogSink): Required. The new sink, whose ``name`` parameter is a sink identifier that is not already in use. @@ -946,9 +1242,10 @@ class CreateSinkRequest(proto.Message): ``writer_identity`` in the new sink. 
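The ``update_mask`` convention used by ``UpdateBucketRequest`` (and the other update requests above) only overwrites the fields named in the mask. A minimal sketch applying the docstring's ``updateMask=retention_days`` example, with hypothetical resource names::

    from google.protobuf import field_mask_pb2
    from google.cloud.logging_v2.types import logging_config

    request = logging_config.UpdateBucketRequest(
        name="projects/my-project/locations/global/buckets/my-bucket",
        bucket=logging_config.LogBucket(retention_days=90),
        # Only fields listed in the mask are changed on the bucket.
        update_mask=field_mask_pb2.FieldMask(paths=["retention_days"]),
    )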
If this value is omitted or set to false, and if the sink's parent is a project, then the value returned as ``writer_identity`` is - the same group or service account used by Logging before the - addition of writer identities to this API. The sink's - destination must be in the same project as the sink itself. + the same group or service account used by Cloud Logging + before the addition of writer identities to this API. The + sink's destination must be in the same project as the sink + itself. If this field is set to true, or if the sink is owned by a non-project resource such as an organization, then the value @@ -988,7 +1285,9 @@ class UpdateSinkRequest(proto.Message): "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" "folders/[FOLDER_ID]/sinks/[SINK_ID]" - Example: ``"projects/my-project-id/sinks/my-sink-id"``. + For example: + + ``"projects/my-project/sinks/my-sink"`` sink (google.cloud.logging_v2.types.LogSink): Required. The updated sink, whose name is the same identifier that appears as part of ``sink_name``. @@ -1014,16 +1313,18 @@ class UpdateSinkRequest(proto.Message): and only if, it is in the update mask. ``name`` and output only fields cannot be updated. - An empty updateMask is temporarily treated as using the + An empty ``updateMask`` is temporarily treated as using the following mask for backwards compatibility purposes: - destination,filter,includeChildren At some point in the - future, behavior will be removed and specifying an empty - updateMask will be an error. + + ``destination,filter,includeChildren`` + + At some point in the future, behavior will be removed and + specifying an empty ``updateMask`` will be an error. For a detailed ``FieldMask`` definition, see https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#google.protobuf.FieldMask - Example: ``updateMask=filter``. + For example: ``updateMask=filter`` """ sink_name: str = proto.Field( @@ -1061,7 +1362,9 @@ class DeleteSinkRequest(proto.Message): "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" "folders/[FOLDER_ID]/sinks/[SINK_ID]" - Example: ``"projects/my-project-id/sinks/my-sink-id"``. + For example: + + ``"projects/my-project/sinks/my-sink"`` """ sink_name: str = proto.Field( @@ -1070,14 +1373,151 @@ class DeleteSinkRequest(proto.Message): ) +class CreateLinkRequest(proto.Message): + r"""The parameters to CreateLink. + + Attributes: + parent (str): + Required. The full resource name of the bucket to create a + link for. + + :: + + "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]". + link (google.cloud.logging_v2.types.Link): + Required. The new link. + link_id (str): + Required. The ID to use for the link. The link_id can have + up to 100 characters. A valid link_id must only have + alphanumeric characters and underscores within it. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + link: 'Link' = proto.Field( + proto.MESSAGE, + number=2, + message='Link', + ) + link_id: str = proto.Field( + proto.STRING, + number=3, + ) + + +class DeleteLinkRequest(proto.Message): + r"""The parameters to DeleteLink. + + Attributes: + name (str): + Required. The full resource name of the link to delete. 
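The new ``CreateLinkRequest`` above takes a parent bucket, a ``Link``, and a ``link_id``. A minimal sketch, assuming the generated module; the parent and link ID are hypothetical::

    from google.cloud.logging_v2.types import logging_config

    request = logging_config.CreateLinkRequest(
        parent="projects/my-project/locations/global/buckets/my-bucket",
        link=logging_config.Link(description="BigQuery link for my-bucket"),
        # Per the BigQueryDataset docs above, link_id also becomes the
        # DATASET_ID, so only alphanumerics and underscores are allowed.
        link_id="my_link",
    )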
+ + "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]" + "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]" + "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]". + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListLinksRequest(proto.Message): + r"""The parameters to ListLinks. + + Attributes: + parent (str): + Required. The parent resource whose links are to be listed: + + "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/" + "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/" + "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/" + "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/ + page_token (str): + Optional. If present, then retrieve the next batch of + results from the preceding call to this method. + ``pageToken`` must be the value of ``nextPageToken`` from + the previous response. + page_size (int): + Optional. The maximum number of results to + return from this request. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_token: str = proto.Field( + proto.STRING, + number=2, + ) + page_size: int = proto.Field( + proto.INT32, + number=3, + ) + + +class ListLinksResponse(proto.Message): + r"""The response from ListLinks. + + Attributes: + links (MutableSequence[google.cloud.logging_v2.types.Link]): + A list of links. + next_page_token (str): + If there might be more results than those appearing in this + response, then ``nextPageToken`` is included. To get the + next set of results, call the same method again using the + value of ``nextPageToken`` as ``pageToken``. + """ + + @property + def raw_page(self): + return self + + links: MutableSequence['Link'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='Link', + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class GetLinkRequest(proto.Message): + r"""The parameters to GetLink. + + Attributes: + name (str): + Required. The resource name of the link: + + "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]" + "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]" + "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID] + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + class LogExclusion(proto.Message): - r"""Specifies a set of log entries that are not to be stored in - Logging. If your GCP resource receives a large volume of logs, - you can use exclusions to reduce your chargeable logs. - Exclusions are processed after log sinks, so you can export log - entries before they are excluded. Note that organization-level - and folder-level exclusions don't apply to child resources, and - that you can't exclude audit log entries. + r"""Specifies a set of log entries that are filtered out by a sink. If + your Google Cloud resource receives a large volume of log entries, + you can use exclusions to reduce your chargeable logs. Note that + exclusions on organization-level and folder-level sinks don't apply + to child resources. Note also that you cannot modify the \_Required + sink or exclude logs from it. 
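``ListLinksResponse`` above follows the standard ``nextPageToken``/``pageToken`` pattern. A minimal paging sketch; ``fetch_page`` is a hypothetical stand-in for whatever transport actually issues the RPC and returns a ``ListLinksResponse``::

    from google.cloud.logging_v2.types import logging_config

    def iter_links(fetch_page, parent):
        """Yield every Link by walking pages until the token runs out."""
        token = ""
        while True:
            response = fetch_page(logging_config.ListLinksRequest(
                parent=parent,
                page_token=token,
            ))
            yield from response.links
            token = response.next_page_token
            if not token:
                break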
Attributes: name (str): @@ -1095,10 +1535,11 @@ class LogExclusion(proto.Message): `sample function `__, you can exclude less than 100% of the matching log entries. + For example, the following query matches 99% of low-severity log entries from Google Cloud Storage buckets: - ``"resource.type=gcs_bucket severity`__ for more information. @@ -1363,11 +1809,14 @@ class GetCmekSettingsRequest(proto.Message): "billingAccounts/[BILLING_ACCOUNT_ID]/cmekSettings" "folders/[FOLDER_ID]/cmekSettings" - Example: ``"organizations/12345/cmekSettings"``. + For example: + + ``"organizations/12345/cmekSettings"`` - Note: CMEK for the Logs Router can currently only be - configured for GCP organizations. Once configured, it - applies to all projects and folders in the GCP organization. + Note: CMEK for the Log Router can be configured for Google + Cloud projects, folders, organizations and billing accounts. + Once configured for an organization, it applies to all + projects and folders in the Google Cloud organization. """ name: str = proto.Field( @@ -1380,7 +1829,7 @@ class UpdateCmekSettingsRequest(proto.Message): r"""The parameters to [UpdateCmekSettings][google.logging.v2.ConfigServiceV2.UpdateCmekSettings]. - See `Enabling CMEK for Logs + See `Enabling CMEK for Log Router `__ for more information. @@ -1395,15 +1844,18 @@ class UpdateCmekSettingsRequest(proto.Message): "billingAccounts/[BILLING_ACCOUNT_ID]/cmekSettings" "folders/[FOLDER_ID]/cmekSettings" - Example: ``"organizations/12345/cmekSettings"``. + For example: + + ``"organizations/12345/cmekSettings"`` - Note: CMEK for the Logs Router can currently only be - configured for GCP organizations. Once configured, it - applies to all projects and folders in the GCP organization. + Note: CMEK for the Log Router can currently only be + configured for Google Cloud organizations. Once configured, + it applies to all projects and folders in the Google Cloud + organization. cmek_settings (google.cloud.logging_v2.types.CmekSettings): Required. The CMEK settings to update. - See `Enabling CMEK for Logs + See `Enabling CMEK for Log Router `__ for more information. update_mask (google.protobuf.field_mask_pb2.FieldMask): @@ -1415,7 +1867,7 @@ class UpdateCmekSettingsRequest(proto.Message): See [FieldMask][google.protobuf.FieldMask] for more information. - Example: ``"updateMask=kmsKeyName"`` + For example: ``"updateMask=kmsKeyName"`` """ name: str = proto.Field( @@ -1439,11 +1891,11 @@ class CmekSettings(proto.Message): associated with a project, folder, organization, billing account, or flexible resource. - Note: CMEK for the Logs Router can currently only be configured for - GCP organizations. Once configured, it applies to all projects and - folders in the GCP organization. + Note: CMEK for the Log Router can currently only be configured for + Google Cloud organizations. Once configured, it applies to all + projects and folders in the Google Cloud organization. - See `Enabling CMEK for Logs + See `Enabling CMEK for Log Router `__ for more information. @@ -1455,14 +1907,209 @@ class CmekSettings(proto.Message): The resource name for the configured Cloud KMS key. 
KMS key name format: - "projects/[PROJECT_ID]/locations/[LOCATION]/keyRings/[KEYRING]/cryptoKeys/[KEY]" + + :: + + "projects/[PROJECT_ID]/locations/[LOCATION]/keyRings/[KEYRING]/cryptoKeys/[KEY]" For example: - ``"projects/my-project-id/locations/my-region/keyRings/key-ring-name/cryptoKeys/key-name"`` - To enable CMEK for the Logs Router, set this field to a - valid ``kms_key_name`` for which the associated service - account has the required + ``"projects/my-project/locations/us-central1/keyRings/my-ring/cryptoKeys/my-key"`` + + To enable CMEK for the Log Router, set this field to a valid + ``kms_key_name`` for which the associated service account + has the required cloudkms.cryptoKeyEncrypterDecrypter roles + assigned for the key. + + The Cloud KMS key used by the Log Router can be updated by + changing the ``kms_key_name`` to a new valid key name or + disabled by setting the key name to an empty string. + Encryption operations that are in progress will be completed + with the key that was in use when they started. Decryption + operations will be completed using the key that was used at + the time of encryption unless access to that key has been + revoked. + + To disable CMEK for the Log Router, set this field to an + empty string. + + See `Enabling CMEK for Log + Router `__ + for more information. + kms_key_version_name (str): + The CryptoKeyVersion resource name for the configured Cloud + KMS key. + + KMS key name format: + + :: + + "projects/[PROJECT_ID]/locations/[LOCATION]/keyRings/[KEYRING]/cryptoKeys/[KEY]/cryptoKeyVersions/[VERSION]" + + For example: + + ``"projects/my-project/locations/us-central1/keyRings/my-ring/cryptoKeys/my-key/cryptoKeyVersions/1"`` + + This is a read-only field used to convey the specific + configured CryptoKeyVersion of ``kms_key`` that has been + configured. It will be populated in cases where the CMEK + settings are bound to a single key version. + + If this field is populated, the ``kms_key`` is tied to a + specific CryptoKeyVersion. + service_account_id (str): + Output only. The service account that will be used by the + Log Router to access your Cloud KMS key. + + Before enabling CMEK for Log Router, you must first assign + the cloudkms.cryptoKeyEncrypterDecrypter role to the service + account that the Log Router will use to access your Cloud + KMS key. Use + [GetCmekSettings][google.logging.v2.ConfigServiceV2.GetCmekSettings] + to obtain the service account ID. + + See `Enabling CMEK for Log + Router `__ + for more information. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + kms_key_name: str = proto.Field( + proto.STRING, + number=2, + ) + kms_key_version_name: str = proto.Field( + proto.STRING, + number=4, + ) + service_account_id: str = proto.Field( + proto.STRING, + number=3, + ) + + +class GetSettingsRequest(proto.Message): + r"""The parameters to + [GetSettings][google.logging.v2.ConfigServiceV2.GetSettings]. + + See `Enabling CMEK for Log + Router `__ + for more information. + + Attributes: + name (str): + Required. The resource for which to retrieve settings. + + :: + + "projects/[PROJECT_ID]/settings" + "organizations/[ORGANIZATION_ID]/settings" + "billingAccounts/[BILLING_ACCOUNT_ID]/settings" + "folders/[FOLDER_ID]/settings" + + For example: + + ``"organizations/12345/settings"`` + + Note: Settings for the Log Router can be get for Google + Cloud projects, folders, organizations and billing accounts. + Currently it can only be configured for organizations. 
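Enabling or rotating CMEK per the ``kms_key_name`` semantics above is an update of the ``CmekSettings`` resource with a field mask. A minimal sketch using the docstring's own name and key formats, with hypothetical project and key ring IDs::

    from google.protobuf import field_mask_pb2
    from google.cloud.logging_v2.types import logging_config

    request = logging_config.UpdateCmekSettingsRequest(
        name="organizations/12345/cmekSettings",
        cmek_settings=logging_config.CmekSettings(
            # Setting this to "" instead would disable CMEK for the Log Router.
            kms_key_name=(
                "projects/my-project/locations/us-central1/"
                "keyRings/my-ring/cryptoKeys/my-key"
            ),
        ),
        update_mask=field_mask_pb2.FieldMask(paths=["kms_key_name"]),
    )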
Once + configured for an organization, it applies to all projects + and folders in the Google Cloud organization. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class UpdateSettingsRequest(proto.Message): + r"""The parameters to + [UpdateSettings][google.logging.v2.ConfigServiceV2.UpdateSettings]. + + See `Enabling CMEK for Log + Router `__ + for more information. + + Attributes: + name (str): + Required. The resource name for the settings to update. + + :: + + "organizations/[ORGANIZATION_ID]/settings" + + For example: + + ``"organizations/12345/settings"`` + + Note: Settings for the Log Router can currently only be + configured for Google Cloud organizations. Once configured, + it applies to all projects and folders in the Google Cloud + organization. + settings (google.cloud.logging_v2.types.Settings): + Required. The settings to update. + + See `Enabling CMEK for Log + Router `__ + for more information. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. Field mask identifying which fields from + ``settings`` should be updated. A field will be overwritten + if and only if it is in the update mask. Output only fields + cannot be updated. + + See [FieldMask][google.protobuf.FieldMask] for more + information. + + For example: ``"updateMask=kmsKeyName"`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + settings: 'Settings' = proto.Field( + proto.MESSAGE, + number=2, + message='Settings', + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=3, + message=field_mask_pb2.FieldMask, + ) + + +class Settings(proto.Message): + r"""Describes the settings associated with a project, folder, + organization, billing account, or flexible resource. + + Attributes: + name (str): + Output only. The resource name of the + settings. + kms_key_name (str): + Optional. The resource name for the configured Cloud KMS + key. + + KMS key name format: + + :: + + "projects/[PROJECT_ID]/locations/[LOCATION]/keyRings/[KEYRING]/cryptoKeys/[KEY]" + + For example: + + ``"projects/my-project/locations/us-central1/keyRings/my-ring/cryptoKeys/my-key"`` + + To enable CMEK for the Log Router, set this field to a valid + ``kms_key_name`` for which the associated service account + has the required ``roles/cloudkms.cryptoKeyEncrypterDecrypter`` role assigned for the key. @@ -1474,26 +2121,38 @@ class CmekSettings(proto.Message): the time of encryption unless access to that key has been revoked. - To disable CMEK for the Logs Router, set this field to an + To disable CMEK for the Log Router, set this field to an empty string. - See `Enabling CMEK for Logs + See `Enabling CMEK for Log Router `__ for more information. - service_account_id (str): + kms_service_account_id (str): Output only. The service account that will be used by the - Logs Router to access your Cloud KMS key. + Log Router to access your Cloud KMS key. - Before enabling CMEK for Logs Router, you must first assign + Before enabling CMEK for Log Router, you must first assign the role ``roles/cloudkms.cryptoKeyEncrypterDecrypter`` to - the service account that the Logs Router will use to access + the service account that the Log Router will use to access your Cloud KMS key. Use - [GetCmekSettings][google.logging.v2.ConfigServiceV2.GetCmekSettings] + [GetSettings][google.logging.v2.ConfigServiceV2.GetSettings] to obtain the service account ID. - See `Enabling CMEK for Logs + See `Enabling CMEK for Log Router `__ for more information. + storage_location (str): + Optional. 
The Cloud region that will be used for \_Default + and \_Required log buckets for newly created projects and + folders. For example ``europe-west1``. This setting does not + affect the location of custom log buckets. + disable_default_sink (bool): + Optional. If set to true, the \_Default sink in newly + created projects and folders will created in a disabled + state. This can be used to automatically disable log + ingestion if there is already an aggregated sink configured + in the hierarchy. The \_Default sink can be re-enabled + manually if needed. """ name: str = proto.Field( @@ -1504,9 +2163,254 @@ class CmekSettings(proto.Message): proto.STRING, number=2, ) - service_account_id: str = proto.Field( + kms_service_account_id: str = proto.Field( + proto.STRING, + number=3, + ) + storage_location: str = proto.Field( proto.STRING, + number=4, + ) + disable_default_sink: bool = proto.Field( + proto.BOOL, + number=5, + ) + + +class CopyLogEntriesRequest(proto.Message): + r"""The parameters to CopyLogEntries. + + Attributes: + name (str): + Required. Log bucket from which to copy log entries. + + For example: + + ``"projects/my-project/locations/global/buckets/my-source-bucket"`` + filter (str): + Optional. A filter specifying which log + entries to copy. The filter must be no more than + 20k characters. An empty filter matches all log + entries. + destination (str): + Required. Destination to which to copy log + entries. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + filter: str = proto.Field( + proto.STRING, + number=3, + ) + destination: str = proto.Field( + proto.STRING, + number=4, + ) + + +class CopyLogEntriesMetadata(proto.Message): + r"""Metadata for CopyLogEntries long running operations. + + Attributes: + start_time (google.protobuf.timestamp_pb2.Timestamp): + The create time of an operation. + end_time (google.protobuf.timestamp_pb2.Timestamp): + The end time of an operation. + state (google.cloud.logging_v2.types.OperationState): + State of an operation. + cancellation_requested (bool): + Identifies whether the user has requested + cancellation of the operation. + request (google.cloud.logging_v2.types.CopyLogEntriesRequest): + CopyLogEntries RPC request. + progress (int): + Estimated progress of the operation (0 - + 100%). + writer_identity (str): + The IAM identity of a service account that must be granted + access to the destination. + + If the service account is not granted permission to the + destination within an hour, the operation will be cancelled. + + For example: ``"serviceAccount:foo@bar.com"`` + """ + + start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + state: 'OperationState' = proto.Field( + proto.ENUM, + number=3, + enum='OperationState', + ) + cancellation_requested: bool = proto.Field( + proto.BOOL, + number=4, + ) + request: 'CopyLogEntriesRequest' = proto.Field( + proto.MESSAGE, + number=5, + message='CopyLogEntriesRequest', + ) + progress: int = proto.Field( + proto.INT32, + number=6, + ) + writer_identity: str = proto.Field( + proto.STRING, + number=7, + ) + + +class CopyLogEntriesResponse(proto.Message): + r"""Response type for CopyLogEntries long running operations. + + Attributes: + log_entries_copied_count (int): + Number of log entries copied. 
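A ``CopyLogEntriesRequest`` combining the ``name``, ``filter``, and ``destination`` fields above might look like the sketch below. The destination value is a hypothetical Cloud Storage path; this diff does not spell out the supported destination formats, so treat it as an assumption::

    from google.cloud.logging_v2.types import logging_config

    request = logging_config.CopyLogEntriesRequest(
        name="projects/my-project/locations/global/buckets/my-source-bucket",
        # An empty filter would copy every entry; cap is 20k characters.
        filter="severity>=ERROR",
        # Hypothetical destination; check the service docs for valid forms.
        destination="storage.googleapis.com/my-archive-bucket",
    )

While the long-running operation runs, its ``CopyLogEntriesMetadata`` reports ``state`` and ``progress`` as described above.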
+ """ + + log_entries_copied_count: int = proto.Field( + proto.INT64, + number=1, + ) + + +class BucketMetadata(proto.Message): + r"""Metadata for LongRunningUpdateBucket Operations. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + start_time (google.protobuf.timestamp_pb2.Timestamp): + The create time of an operation. + end_time (google.protobuf.timestamp_pb2.Timestamp): + The end time of an operation. + state (google.cloud.logging_v2.types.OperationState): + State of an operation. + create_bucket_request (google.cloud.logging_v2.types.CreateBucketRequest): + LongRunningCreateBucket RPC request. + + This field is a member of `oneof`_ ``request``. + update_bucket_request (google.cloud.logging_v2.types.UpdateBucketRequest): + LongRunningUpdateBucket RPC request. + + This field is a member of `oneof`_ ``request``. + """ + + start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + state: 'OperationState' = proto.Field( + proto.ENUM, number=3, + enum='OperationState', + ) + create_bucket_request: 'CreateBucketRequest' = proto.Field( + proto.MESSAGE, + number=4, + oneof='request', + message='CreateBucketRequest', + ) + update_bucket_request: 'UpdateBucketRequest' = proto.Field( + proto.MESSAGE, + number=5, + oneof='request', + message='UpdateBucketRequest', + ) + + +class LinkMetadata(proto.Message): + r"""Metadata for long running Link operations. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + start_time (google.protobuf.timestamp_pb2.Timestamp): + The start time of an operation. + end_time (google.protobuf.timestamp_pb2.Timestamp): + The end time of an operation. + state (google.cloud.logging_v2.types.OperationState): + State of an operation. + create_link_request (google.cloud.logging_v2.types.CreateLinkRequest): + CreateLink RPC request. + + This field is a member of `oneof`_ ``request``. + delete_link_request (google.cloud.logging_v2.types.DeleteLinkRequest): + DeleteLink RPC request. + + This field is a member of `oneof`_ ``request``. + """ + + start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + state: 'OperationState' = proto.Field( + proto.ENUM, + number=3, + enum='OperationState', + ) + create_link_request: 'CreateLinkRequest' = proto.Field( + proto.MESSAGE, + number=4, + oneof='request', + message='CreateLinkRequest', + ) + delete_link_request: 'DeleteLinkRequest' = proto.Field( + proto.MESSAGE, + number=5, + oneof='request', + message='DeleteLinkRequest', + ) + + +class LocationMetadata(proto.Message): + r"""Cloud Logging specific location metadata. 
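Because ``BucketMetadata`` and ``LinkMetadata`` each wrap their request in a mutually exclusive ``request`` oneof, at most one member is populated. A minimal sketch of inspecting which member is set, assuming a proto-plus message instance::

    from google.cloud.logging_v2.types import logging_config

    def describe(meta):
        """Report which oneof member of the `request` group is populated."""
        # Drop to the raw protobuf to ask which oneof member is set.
        which = logging_config.LinkMetadata.pb(meta).WhichOneof("request")
        if which == "create_link_request":
            return "creating " + meta.create_link_request.link_id
        if which == "delete_link_request":
            return "deleting " + meta.delete_link_request.name
        return "no request recorded"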
+ + Attributes: + log_analytics_enabled (bool): + Indicates whether or not Log Analytics + features are supported in the given location. + """ + + log_analytics_enabled: bool = proto.Field( + proto.BOOL, + number=1, ) diff --git a/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_metrics.py b/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_metrics.py index 8d39eb807f..9a485ee8f9 100755 --- a/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_metrics.py +++ b/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_metrics.py @@ -42,6 +42,7 @@ class LogMetric(proto.Message): r"""Describes a logs-based metric. The value of the metric is the number of log entries that match a logs filter in a given time interval. + Logs-based metrics can also be used to extract values from logs and create a distribution of the values. The distribution records the statistics of the extracted values along with an @@ -59,12 +60,12 @@ class LogMetric(proto.Message): forward-slash character (``/``) denotes a hierarchy of name pieces, and it cannot be the first character of the name. - The metric identifier in this field must not be - `URL-encoded `__. - However, when the metric identifier appears as the - ``[METRIC_ID]`` part of a ``metric_name`` API parameter, - then the metric identifier must be URL-encoded. Example: - ``"projects/my-project/metrics/nginx%2Frequests"``. + This field is the ``[METRIC_ID]`` part of a metric resource + name in the format + "projects/[PROJECT_ID]/metrics/[METRIC_ID]". Example: If the + resource name of a metric is + ``"projects/my-project/metrics/nginx%2Frequests"``, this + field's value is ``"nginx/requests"``. description (str): Optional. A description of this metric, which is used in documentation. The maximum length of @@ -79,6 +80,20 @@ class LogMetric(proto.Message): "resource.type=gae_app AND severity>=ERROR" The maximum length of the filter is 20000 characters. + bucket_name (str): + Optional. The resource name of the Log Bucket that owns the + Log Metric. Only Log Buckets in projects are supported. The + bucket has to be in the same project as the metric. + + For example: + + ``projects/my-project/locations/global/buckets/my-bucket`` + + If empty, then the Log Metric is considered a non-Bucket Log + Metric. + disabled (bool): + Optional. If set to True, then this metric is + disabled and it does not generate any points. metric_descriptor (google.api.metric_pb2.MetricDescriptor): Optional. The metric descriptor associated with the logs-based metric. If unspecified, it uses a default metric @@ -110,7 +125,7 @@ class LogMetric(proto.Message): distribution logs-based metric to extract the values to record from a log entry. Two functions are supported for value extraction: ``EXTRACT(field)`` or - ``REGEXP_EXTRACT(field, regex)``. The argument are: + ``REGEXP_EXTRACT(field, regex)``. The arguments are: 1. field: The name of the log entry field from which the value is to be extracted. @@ -139,7 +154,7 @@ class LogMetric(proto.Message): ``value_extractor`` field. The extracted value is converted to the type defined in the - label descriptor. If the either the extraction or the type + label descriptor. If either the extraction or the type conversion fails, the label will have a default value. The default value for a string label is an empty string, for an integer label it is 0, and for a boolean label it is ``false``.
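The ``value_extractor`` contract and the new ``bucket_name`` and ``disabled`` fields documented above are easier to see in a concrete metric definition. The following is a minimal, illustrative sketch only, assuming the generated ``logging_v2`` package exports ``LogMetric`` at the top level (as it does for the request types used in the samples in this change); the project, bucket, filter, and regex here are hypothetical::

    from google.cloud import logging_v2

    # Hypothetical distribution metric: REGEXP_EXTRACT pulls the latency
    # out of each matching entry's textPayload; the single capture group
    # is the value recorded in the distribution.
    metric = logging_v2.LogMetric(
        name="nginx/latencies",
        description="Distribution of nginx request latencies.",
        filter='resource.type="gae_app" AND severity>=INFO',
        value_extractor='REGEXP_EXTRACT(textPayload, "latency: (\\d+)ms")',
        # New in this change: a metric can be scoped to one Log Bucket
        # (which must be in the same project as the metric), and it can
        # be disabled to stop generating points without being deleted.
        bucket_name="projects/my-project/locations/global/buckets/my-bucket",
        disabled=False,
    )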
@@ -189,6 +204,14 @@ class ApiVersion(proto.Enum): proto.STRING, number=3, ) + bucket_name: str = proto.Field( + proto.STRING, + number=13, + ) + disabled: bool = proto.Field( + proto.BOOL, + number=12, + ) metric_descriptor: metric_pb2.MetricDescriptor = proto.Field( proto.MESSAGE, number=5, diff --git a/tests/integration/goldens/logging/noxfile.py b/tests/integration/goldens/logging/noxfile.py index e87b6c033b..db2905b4a3 100755 --- a/tests/integration/goldens/logging/noxfile.py +++ b/tests/integration/goldens/logging/noxfile.py @@ -134,7 +134,7 @@ def docs(session): """Build the docs for this library.""" session.install("-e", ".") - session.install("sphinx==4.0.1", "alabaster", "recommonmark") + session.install("sphinx==7.0.1", "alabaster", "recommonmark") shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) session.run( diff --git a/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_async.py b/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_async.py new file mode 100755 index 0000000000..806e937dda --- /dev/null +++ b/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CopyLogEntries +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_CopyLogEntries_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +async def sample_copy_log_entries(): + # Create a client + client = logging_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.CopyLogEntriesRequest( + name="name_value", + destination="destination_value", + ) + + # Make the request + operation = client.copy_log_entries(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_CopyLogEntries_async] diff --git a/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_sync.py b/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_sync.py new file mode 100755 index 0000000000..ca0209f00f --- /dev/null +++ b/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CopyLogEntries +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_CopyLogEntries_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +def sample_copy_log_entries(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.CopyLogEntriesRequest( + name="name_value", + destination="destination_value", + ) + + # Make the request + operation = client.copy_log_entries(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_CopyLogEntries_sync] diff --git a/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async_async.py b/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async_async.py new file mode 100755 index 0000000000..8fe42df3c8 --- /dev/null +++ b/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateBucketAsync +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_CreateBucketAsync_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +async def sample_create_bucket_async(): + # Create a client + client = logging_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.CreateBucketRequest( + parent="parent_value", + bucket_id="bucket_id_value", + ) + + # Make the request + operation = client.create_bucket_async(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_CreateBucketAsync_async] diff --git a/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async_sync.py b/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async_sync.py new file mode 100755 index 0000000000..1ce6987845 --- /dev/null +++ b/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateBucketAsync +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_CreateBucketAsync_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +def sample_create_bucket_async(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.CreateBucketRequest( + parent="parent_value", + bucket_id="bucket_id_value", + ) + + # Make the request + operation = client.create_bucket_async(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_CreateBucketAsync_sync] diff --git a/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_link_async.py b/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_link_async.py new file mode 100755 index 0000000000..8ceb529855 --- /dev/null +++ b/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_link_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateLink +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_CreateLink_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +async def sample_create_link(): + # Create a client + client = logging_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.CreateLinkRequest( + parent="parent_value", + link_id="link_id_value", + ) + + # Make the request + operation = client.create_link(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_CreateLink_async] diff --git a/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_link_sync.py b/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_link_sync.py new file mode 100755 index 0000000000..604ff66269 --- /dev/null +++ b/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_link_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateLink +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_CreateLink_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +def sample_create_link(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.CreateLinkRequest( + parent="parent_value", + link_id="link_id_value", + ) + + # Make the request + operation = client.create_link(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_CreateLink_sync] diff --git a/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_link_async.py b/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_link_async.py new file mode 100755 index 0000000000..8c7a934a73 --- /dev/null +++ b/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_link_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteLink +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_DeleteLink_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +async def sample_delete_link(): + # Create a client + client = logging_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.DeleteLinkRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_link(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_DeleteLink_async] diff --git a/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_link_sync.py b/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_link_sync.py new file mode 100755 index 0000000000..dfa59b3074 --- /dev/null +++ b/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_link_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteLink +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_DeleteLink_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +def sample_delete_link(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.DeleteLinkRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_link(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_DeleteLink_sync] diff --git a/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_link_async.py b/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_link_async.py new file mode 100755 index 0000000000..ddc3d131f4 --- /dev/null +++ b/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_link_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetLink +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_GetLink_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +async def sample_get_link(): + # Create a client + client = logging_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.GetLinkRequest( + name="name_value", + ) + + # Make the request + response = await client.get_link(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_GetLink_async] diff --git a/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_link_sync.py b/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_link_sync.py new file mode 100755 index 0000000000..3a7643b3a2 --- /dev/null +++ b/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_link_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetLink +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_GetLink_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +def sample_get_link(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.GetLinkRequest( + name="name_value", + ) + + # Make the request + response = client.get_link(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_GetLink_sync] diff --git a/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_async.py b/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_async.py new file mode 100755 index 0000000000..4ee968e815 --- /dev/null +++ b/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetSettings +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_GetSettings_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +async def sample_get_settings(): + # Create a client + client = logging_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.GetSettingsRequest( + name="name_value", + ) + + # Make the request + response = await client.get_settings(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_GetSettings_async] diff --git a/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_sync.py b/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_sync.py new file mode 100755 index 0000000000..a3e018440c --- /dev/null +++ b/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetSettings +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_GetSettings_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +def sample_get_settings(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.GetSettingsRequest( + name="name_value", + ) + + # Make the request + response = client.get_settings(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_GetSettings_sync] diff --git a/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_links_async.py b/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_links_async.py new file mode 100755 index 0000000000..7eccffaa6b --- /dev/null +++ b/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_links_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListLinks +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_ListLinks_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +async def sample_list_links(): + # Create a client + client = logging_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.ListLinksRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_links(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END logging_v2_generated_ConfigServiceV2_ListLinks_async] diff --git a/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_links_sync.py b/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_links_sync.py new file mode 100755 index 0000000000..a2f98d69d3 --- /dev/null +++ b/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_links_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListLinks +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_ListLinks_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +def sample_list_links(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.ListLinksRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_links(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END logging_v2_generated_ConfigServiceV2_ListLinks_sync] diff --git a/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async_async.py b/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async_async.py new file mode 100755 index 0000000000..7dde59dcdd --- /dev/null +++ b/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateBucketAsync +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_UpdateBucketAsync_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +async def sample_update_bucket_async(): + # Create a client + client = logging_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.UpdateBucketRequest( + name="name_value", + ) + + # Make the request + operation = client.update_bucket_async(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_UpdateBucketAsync_async] diff --git a/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async_sync.py b/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async_sync.py new file mode 100755 index 0000000000..2ecaf8df26 --- /dev/null +++ b/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateBucketAsync +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_UpdateBucketAsync_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +def sample_update_bucket_async(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.UpdateBucketRequest( + name="name_value", + ) + + # Make the request + operation = client.update_bucket_async(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_UpdateBucketAsync_sync] diff --git a/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_async.py b/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_async.py new file mode 100755 index 0000000000..b51dd81cc9 --- /dev/null +++ b/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateSettings +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_UpdateSettings_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +async def sample_update_settings(): + # Create a client + client = logging_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.UpdateSettingsRequest( + name="name_value", + ) + + # Make the request + response = await client.update_settings(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_UpdateSettings_async] diff --git a/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_sync.py b/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_sync.py new file mode 100755 index 0000000000..1e7aefce8f --- /dev/null +++ b/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateSettings +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_UpdateSettings_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +def sample_update_settings(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.UpdateSettingsRequest( + name="name_value", + ) + + # Make the request + response = client.update_settings(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_UpdateSettings_sync] diff --git a/tests/integration/goldens/logging/samples/generated_samples/snippet_metadata_google.logging.v2.json b/tests/integration/goldens/logging/samples/generated_samples/snippet_metadata_google.logging.v2.json index cf16dded69..b62675ba64 100755 --- a/tests/integration/goldens/logging/samples/generated_samples/snippet_metadata_google.logging.v2.json +++ b/tests/integration/goldens/logging/samples/generated_samples/snippet_metadata_google.logging.v2.json @@ -19,19 +19,19 @@ "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", "shortName": "ConfigServiceV2AsyncClient" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.create_bucket", + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.copy_log_entries", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.CreateBucket", + "fullName": "google.logging.v2.ConfigServiceV2.CopyLogEntries", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "CreateBucket" + "shortName": "CopyLogEntries" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.CreateBucketRequest" + "type": "google.cloud.logging_v2.types.CopyLogEntriesRequest" }, { "name": "retry", @@ -46,22 +46,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.logging_v2.types.LogBucket", - "shortName": "create_bucket" + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "copy_log_entries" }, - "description": "Sample for CreateBucket", - "file": "logging_v2_generated_config_service_v2_create_bucket_async.py", + "description": "Sample for CopyLogEntries", + "file": "logging_v2_generated_config_service_v2_copy_log_entries_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_CreateBucket_async", + "regionTag": "logging_v2_generated_ConfigServiceV2_CopyLogEntries_async", "segments": [ { - "end": 52, + "end": 56, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 56, "start": 27, "type": "SHORT" }, @@ -76,17 +76,17 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 49, + "end": 53, "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 50, + "end": 57, + "start": 54, "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_create_bucket_async.py" + "title": "logging_v2_generated_config_service_v2_copy_log_entries_async.py" }, { "canonical": true, @@ -95,19 +95,19 @@ "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", "shortName": "ConfigServiceV2Client" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.create_bucket", + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.copy_log_entries", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.CreateBucket", + "fullName": "google.logging.v2.ConfigServiceV2.CopyLogEntries", "service": { "fullName": 
"google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "CreateBucket" + "shortName": "CopyLogEntries" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.CreateBucketRequest" + "type": "google.cloud.logging_v2.types.CopyLogEntriesRequest" }, { "name": "retry", @@ -122,22 +122,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.logging_v2.types.LogBucket", - "shortName": "create_bucket" + "resultType": "google.api_core.operation.Operation", + "shortName": "copy_log_entries" }, - "description": "Sample for CreateBucket", - "file": "logging_v2_generated_config_service_v2_create_bucket_sync.py", + "description": "Sample for CopyLogEntries", + "file": "logging_v2_generated_config_service_v2_copy_log_entries_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_CreateBucket_sync", + "regionTag": "logging_v2_generated_ConfigServiceV2_CopyLogEntries_sync", "segments": [ { - "end": 52, + "end": 56, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 56, "start": 27, "type": "SHORT" }, @@ -152,17 +152,17 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 49, + "end": 53, "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 50, + "end": 57, + "start": 54, "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_create_bucket_sync.py" + "title": "logging_v2_generated_config_service_v2_copy_log_entries_sync.py" }, { "canonical": true, @@ -172,27 +172,19 @@ "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", "shortName": "ConfigServiceV2AsyncClient" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.create_exclusion", + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.create_bucket_async", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.CreateExclusion", + "fullName": "google.logging.v2.ConfigServiceV2.CreateBucketAsync", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "CreateExclusion" + "shortName": "CreateBucketAsync" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.CreateExclusionRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "exclusion", - "type": "google.cloud.logging_v2.types.LogExclusion" + "type": "google.cloud.logging_v2.types.CreateBucketRequest" }, { "name": "retry", @@ -207,14 +199,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.logging_v2.types.LogExclusion", - "shortName": "create_exclusion" + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_bucket_async" }, - "description": "Sample for CreateExclusion", - "file": "logging_v2_generated_config_service_v2_create_exclusion_async.py", + "description": "Sample for CreateBucketAsync", + "file": "logging_v2_generated_config_service_v2_create_bucket_async_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_CreateExclusion_async", + "regionTag": "logging_v2_generated_ConfigServiceV2_CreateBucketAsync_async", "segments": [ { "end": 56, @@ -232,13 +224,13 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 50, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { "end": 53, - "start": 51, + "start": 47, "type": "REQUEST_EXECUTION" }, { @@ -247,7 +239,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": 
"logging_v2_generated_config_service_v2_create_exclusion_async.py" + "title": "logging_v2_generated_config_service_v2_create_bucket_async_async.py" }, { "canonical": true, @@ -256,27 +248,19 @@ "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", "shortName": "ConfigServiceV2Client" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.create_exclusion", + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.create_bucket_async", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.CreateExclusion", + "fullName": "google.logging.v2.ConfigServiceV2.CreateBucketAsync", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "CreateExclusion" + "shortName": "CreateBucketAsync" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.CreateExclusionRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "exclusion", - "type": "google.cloud.logging_v2.types.LogExclusion" + "type": "google.cloud.logging_v2.types.CreateBucketRequest" }, { "name": "retry", @@ -291,14 +275,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.logging_v2.types.LogExclusion", - "shortName": "create_exclusion" + "resultType": "google.api_core.operation.Operation", + "shortName": "create_bucket_async" }, - "description": "Sample for CreateExclusion", - "file": "logging_v2_generated_config_service_v2_create_exclusion_sync.py", + "description": "Sample for CreateBucketAsync", + "file": "logging_v2_generated_config_service_v2_create_bucket_async_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_CreateExclusion_sync", + "regionTag": "logging_v2_generated_ConfigServiceV2_CreateBucketAsync_sync", "segments": [ { "end": 56, @@ -316,13 +300,13 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 50, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { "end": 53, - "start": 51, + "start": 47, "type": "REQUEST_EXECUTION" }, { @@ -331,7 +315,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_create_exclusion_sync.py" + "title": "logging_v2_generated_config_service_v2_create_bucket_async_sync.py" }, { "canonical": true, @@ -341,27 +325,19 @@ "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", "shortName": "ConfigServiceV2AsyncClient" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.create_sink", + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.create_bucket", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.CreateSink", + "fullName": "google.logging.v2.ConfigServiceV2.CreateBucket", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "CreateSink" + "shortName": "CreateBucket" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.CreateSinkRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "sink", - "type": "google.cloud.logging_v2.types.LogSink" + "type": "google.cloud.logging_v2.types.CreateBucketRequest" }, { "name": "retry", @@ -376,22 +352,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.logging_v2.types.LogSink", - "shortName": "create_sink" + "resultType": "google.cloud.logging_v2.types.LogBucket", + "shortName": "create_bucket" }, - "description": "Sample for CreateSink", - "file": "logging_v2_generated_config_service_v2_create_sink_async.py", + "description": "Sample for 
CreateBucket", + "file": "logging_v2_generated_config_service_v2_create_bucket_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_CreateSink_async", + "regionTag": "logging_v2_generated_ConfigServiceV2_CreateBucket_async", "segments": [ { - "end": 56, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 56, + "end": 52, "start": 27, "type": "SHORT" }, @@ -401,22 +377,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 50, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 53, - "start": 51, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 57, - "start": 54, + "end": 53, + "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_create_sink_async.py" + "title": "logging_v2_generated_config_service_v2_create_bucket_async.py" }, { "canonical": true, @@ -425,27 +401,19 @@ "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", "shortName": "ConfigServiceV2Client" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.create_sink", + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.create_bucket", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.CreateSink", + "fullName": "google.logging.v2.ConfigServiceV2.CreateBucket", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "CreateSink" + "shortName": "CreateBucket" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.CreateSinkRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "sink", - "type": "google.cloud.logging_v2.types.LogSink" + "type": "google.cloud.logging_v2.types.CreateBucketRequest" }, { "name": "retry", @@ -460,22 +428,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.logging_v2.types.LogSink", - "shortName": "create_sink" + "resultType": "google.cloud.logging_v2.types.LogBucket", + "shortName": "create_bucket" }, - "description": "Sample for CreateSink", - "file": "logging_v2_generated_config_service_v2_create_sink_sync.py", + "description": "Sample for CreateBucket", + "file": "logging_v2_generated_config_service_v2_create_bucket_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_CreateSink_sync", + "regionTag": "logging_v2_generated_ConfigServiceV2_CreateBucket_sync", "segments": [ { - "end": 56, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 56, + "end": 52, "start": 27, "type": "SHORT" }, @@ -485,22 +453,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 50, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 53, - "start": 51, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 57, - "start": 54, + "end": 53, + "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_create_sink_sync.py" + "title": "logging_v2_generated_config_service_v2_create_bucket_sync.py" }, { "canonical": true, @@ -510,19 +478,27 @@ "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", "shortName": "ConfigServiceV2AsyncClient" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.create_view", + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.create_exclusion", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.CreateView", + "fullName": "google.logging.v2.ConfigServiceV2.CreateExclusion", "service": { "fullName": 
"google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "CreateView" + "shortName": "CreateExclusion" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.CreateViewRequest" + "type": "google.cloud.logging_v2.types.CreateExclusionRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "exclusion", + "type": "google.cloud.logging_v2.types.LogExclusion" }, { "name": "retry", @@ -537,22 +513,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.logging_v2.types.LogView", - "shortName": "create_view" + "resultType": "google.cloud.logging_v2.types.LogExclusion", + "shortName": "create_exclusion" }, - "description": "Sample for CreateView", - "file": "logging_v2_generated_config_service_v2_create_view_async.py", + "description": "Sample for CreateExclusion", + "file": "logging_v2_generated_config_service_v2_create_exclusion_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_CreateView_async", + "regionTag": "logging_v2_generated_ConfigServiceV2_CreateExclusion_async", "segments": [ { - "end": 52, + "end": 56, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 56, "start": 27, "type": "SHORT" }, @@ -562,22 +538,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 50, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 47, + "end": 53, + "start": 51, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 50, + "end": 57, + "start": 54, "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_create_view_async.py" + "title": "logging_v2_generated_config_service_v2_create_exclusion_async.py" }, { "canonical": true, @@ -586,19 +562,27 @@ "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", "shortName": "ConfigServiceV2Client" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.create_view", + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.create_exclusion", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.CreateView", + "fullName": "google.logging.v2.ConfigServiceV2.CreateExclusion", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "CreateView" + "shortName": "CreateExclusion" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.CreateViewRequest" + "type": "google.cloud.logging_v2.types.CreateExclusionRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "exclusion", + "type": "google.cloud.logging_v2.types.LogExclusion" }, { "name": "retry", @@ -613,22 +597,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.logging_v2.types.LogView", - "shortName": "create_view" + "resultType": "google.cloud.logging_v2.types.LogExclusion", + "shortName": "create_exclusion" }, - "description": "Sample for CreateView", - "file": "logging_v2_generated_config_service_v2_create_view_sync.py", + "description": "Sample for CreateExclusion", + "file": "logging_v2_generated_config_service_v2_create_exclusion_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_CreateView_sync", + "regionTag": "logging_v2_generated_ConfigServiceV2_CreateExclusion_sync", "segments": [ { - "end": 52, + "end": 56, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 56, "start": 27, "type": "SHORT" }, @@ -638,22 +622,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 50, 
"start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 47, + "end": 53, + "start": 51, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 50, + "end": 57, + "start": 54, "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_create_view_sync.py" + "title": "logging_v2_generated_config_service_v2_create_exclusion_sync.py" }, { "canonical": true, @@ -663,9 +647,581 @@ "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", "shortName": "ConfigServiceV2AsyncClient" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.delete_bucket", + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.create_link", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.DeleteBucket", + "fullName": "google.logging.v2.ConfigServiceV2.CreateLink", + "service": { + "fullName": "google.logging.v2.ConfigServiceV2", + "shortName": "ConfigServiceV2" + }, + "shortName": "CreateLink" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.CreateLinkRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "link", + "type": "google.cloud.logging_v2.types.Link" + }, + { + "name": "link_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_link" + }, + "description": "Sample for CreateLink", + "file": "logging_v2_generated_config_service_v2_create_link_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_ConfigServiceV2_CreateLink_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_config_service_v2_create_link_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", + "shortName": "ConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.create_link", + "method": { + "fullName": "google.logging.v2.ConfigServiceV2.CreateLink", + "service": { + "fullName": "google.logging.v2.ConfigServiceV2", + "shortName": "ConfigServiceV2" + }, + "shortName": "CreateLink" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.CreateLinkRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "link", + "type": "google.cloud.logging_v2.types.Link" + }, + { + "name": "link_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_link" + }, + "description": "Sample for CreateLink", + "file": "logging_v2_generated_config_service_v2_create_link_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_ConfigServiceV2_CreateLink_sync", + "segments": [ + { + "end": 56, + "start": 27, + 
"type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_config_service_v2_create_link_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", + "shortName": "ConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.create_sink", + "method": { + "fullName": "google.logging.v2.ConfigServiceV2.CreateSink", + "service": { + "fullName": "google.logging.v2.ConfigServiceV2", + "shortName": "ConfigServiceV2" + }, + "shortName": "CreateSink" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.CreateSinkRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "sink", + "type": "google.cloud.logging_v2.types.LogSink" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogSink", + "shortName": "create_sink" + }, + "description": "Sample for CreateSink", + "file": "logging_v2_generated_config_service_v2_create_sink_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_ConfigServiceV2_CreateSink_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_config_service_v2_create_sink_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", + "shortName": "ConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.create_sink", + "method": { + "fullName": "google.logging.v2.ConfigServiceV2.CreateSink", + "service": { + "fullName": "google.logging.v2.ConfigServiceV2", + "shortName": "ConfigServiceV2" + }, + "shortName": "CreateSink" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.CreateSinkRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "sink", + "type": "google.cloud.logging_v2.types.LogSink" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogSink", + "shortName": "create_sink" + }, + "description": "Sample for CreateSink", + "file": "logging_v2_generated_config_service_v2_create_sink_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_ConfigServiceV2_CreateSink_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { 
+ "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_config_service_v2_create_sink_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", + "shortName": "ConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.create_view", + "method": { + "fullName": "google.logging.v2.ConfigServiceV2.CreateView", + "service": { + "fullName": "google.logging.v2.ConfigServiceV2", + "shortName": "ConfigServiceV2" + }, + "shortName": "CreateView" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.CreateViewRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogView", + "shortName": "create_view" + }, + "description": "Sample for CreateView", + "file": "logging_v2_generated_config_service_v2_create_view_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_ConfigServiceV2_CreateView_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_config_service_v2_create_view_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", + "shortName": "ConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.create_view", + "method": { + "fullName": "google.logging.v2.ConfigServiceV2.CreateView", + "service": { + "fullName": "google.logging.v2.ConfigServiceV2", + "shortName": "ConfigServiceV2" + }, + "shortName": "CreateView" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.CreateViewRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogView", + "shortName": "create_view" + }, + "description": "Sample for CreateView", + "file": "logging_v2_generated_config_service_v2_create_view_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_ConfigServiceV2_CreateView_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_config_service_v2_create_view_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": 
"google.cloud.logging_v2.ConfigServiceV2AsyncClient", + "shortName": "ConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.delete_bucket", + "method": { + "fullName": "google.logging.v2.ConfigServiceV2.DeleteBucket", + "service": { + "fullName": "google.logging.v2.ConfigServiceV2", + "shortName": "ConfigServiceV2" + }, + "shortName": "DeleteBucket" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.DeleteBucketRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_bucket" + }, + "description": "Sample for DeleteBucket", + "file": "logging_v2_generated_config_service_v2_delete_bucket_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteBucket_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_config_service_v2_delete_bucket_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", + "shortName": "ConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.delete_bucket", + "method": { + "fullName": "google.logging.v2.ConfigServiceV2.DeleteBucket", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" @@ -675,7 +1231,478 @@ "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.DeleteBucketRequest" + "type": "google.cloud.logging_v2.types.DeleteBucketRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_bucket" + }, + "description": "Sample for DeleteBucket", + "file": "logging_v2_generated_config_service_v2_delete_bucket_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteBucket_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_config_service_v2_delete_bucket_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", + "shortName": "ConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.delete_exclusion", + "method": { + "fullName": "google.logging.v2.ConfigServiceV2.DeleteExclusion", + "service": { + "fullName": "google.logging.v2.ConfigServiceV2", + "shortName": "ConfigServiceV2" + }, + "shortName": "DeleteExclusion" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.logging_v2.types.DeleteExclusionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_exclusion" + }, + "description": "Sample for DeleteExclusion", + "file": "logging_v2_generated_config_service_v2_delete_exclusion_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteExclusion_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_config_service_v2_delete_exclusion_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", + "shortName": "ConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.delete_exclusion", + "method": { + "fullName": "google.logging.v2.ConfigServiceV2.DeleteExclusion", + "service": { + "fullName": "google.logging.v2.ConfigServiceV2", + "shortName": "ConfigServiceV2" + }, + "shortName": "DeleteExclusion" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.DeleteExclusionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_exclusion" + }, + "description": "Sample for DeleteExclusion", + "file": "logging_v2_generated_config_service_v2_delete_exclusion_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteExclusion_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_config_service_v2_delete_exclusion_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", + "shortName": "ConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.delete_link", + "method": { + "fullName": "google.logging.v2.ConfigServiceV2.DeleteLink", + "service": { + "fullName": "google.logging.v2.ConfigServiceV2", + "shortName": "ConfigServiceV2" + }, + "shortName": "DeleteLink" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.DeleteLinkRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_link" + }, + "description": 
"Sample for DeleteLink", + "file": "logging_v2_generated_config_service_v2_delete_link_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteLink_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_config_service_v2_delete_link_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", + "shortName": "ConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.delete_link", + "method": { + "fullName": "google.logging.v2.ConfigServiceV2.DeleteLink", + "service": { + "fullName": "google.logging.v2.ConfigServiceV2", + "shortName": "ConfigServiceV2" + }, + "shortName": "DeleteLink" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.DeleteLinkRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_link" + }, + "description": "Sample for DeleteLink", + "file": "logging_v2_generated_config_service_v2_delete_link_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteLink_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_config_service_v2_delete_link_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", + "shortName": "ConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.delete_sink", + "method": { + "fullName": "google.logging.v2.ConfigServiceV2.DeleteSink", + "service": { + "fullName": "google.logging.v2.ConfigServiceV2", + "shortName": "ConfigServiceV2" + }, + "shortName": "DeleteSink" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.DeleteSinkRequest" + }, + { + "name": "sink_name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_sink" + }, + "description": "Sample for DeleteSink", + "file": "logging_v2_generated_config_service_v2_delete_sink_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteSink_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 
+        "start": 38,
+        "type": "CLIENT_INITIALIZATION"
+      },
+      {
+        "end": 45,
+        "start": 41,
+        "type": "REQUEST_INITIALIZATION"
+      },
+      {
+        "start": 46,
+        "type": "REQUEST_EXECUTION"
+      },
+      {
+        "end": 50,
+        "type": "RESPONSE_HANDLING"
+      }
+    ],
+    "title": "logging_v2_generated_config_service_v2_delete_sink_async.py"
+  },
+  {
+    "canonical": true,
+    "clientMethod": {
+      "client": {
+        "fullName": "google.cloud.logging_v2.ConfigServiceV2Client",
+        "shortName": "ConfigServiceV2Client"
+      },
+      "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.delete_sink",
+      "method": {
+        "fullName": "google.logging.v2.ConfigServiceV2.DeleteSink",
+        "service": {
+          "fullName": "google.logging.v2.ConfigServiceV2",
+          "shortName": "ConfigServiceV2"
+        },
+        "shortName": "DeleteSink"
+      },
+      "parameters": [
+        {
+          "name": "request",
+          "type": "google.cloud.logging_v2.types.DeleteSinkRequest"
+        },
+        {
+          "name": "sink_name",
+          "type": "str"
        },
         {
           "name": "retry",
@@ -690,13 +1717,13 @@
           "type": "Sequence[Tuple[str, str]"
         }
       ],
-      "shortName": "delete_bucket"
+      "shortName": "delete_sink"
     },
-    "description": "Sample for DeleteBucket",
-    "file": "logging_v2_generated_config_service_v2_delete_bucket_async.py",
+    "description": "Sample for DeleteSink",
+    "file": "logging_v2_generated_config_service_v2_delete_sink_sync.py",
     "language": "PYTHON",
     "origin": "API_DEFINITION",
-    "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteBucket_async",
+    "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteSink_sync",
     "segments": [
       {
         "end": 49,
@@ -727,7 +1754,81 @@
         "type": "RESPONSE_HANDLING"
       }
     ],
-    "title": "logging_v2_generated_config_service_v2_delete_bucket_async.py"
+    "title": "logging_v2_generated_config_service_v2_delete_sink_sync.py"
+  },
+  {
+    "canonical": true,
+    "clientMethod": {
+      "async": true,
+      "client": {
+        "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient",
+        "shortName": "ConfigServiceV2AsyncClient"
+      },
+      "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.delete_view",
+      "method": {
+        "fullName": "google.logging.v2.ConfigServiceV2.DeleteView",
+        "service": {
+          "fullName": "google.logging.v2.ConfigServiceV2",
+          "shortName": "ConfigServiceV2"
+        },
+        "shortName": "DeleteView"
+      },
+      "parameters": [
+        {
+          "name": "request",
+          "type": "google.cloud.logging_v2.types.DeleteViewRequest"
+        },
+        {
+          "name": "retry",
+          "type": "google.api_core.retry.Retry"
+        },
+        {
+          "name": "timeout",
+          "type": "float"
+        },
+        {
+          "name": "metadata",
+          "type": "Sequence[Tuple[str, str]"
+        }
+      ],
+      "shortName": "delete_view"
+    },
+    "description": "Sample for DeleteView",
+    "file": "logging_v2_generated_config_service_v2_delete_view_async.py",
+    "language": "PYTHON",
+    "origin": "API_DEFINITION",
+    "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteView_async",
+    "segments": [
+      {
+        "end": 49,
+        "start": 27,
+        "type": "FULL"
+      },
+      {
+        "end": 49,
+        "start": 27,
+        "type": "SHORT"
+      },
+      {
+        "end": 40,
+        "start": 38,
+        "type": "CLIENT_INITIALIZATION"
+      },
+      {
+        "end": 45,
+        "start": 41,
+        "type": "REQUEST_INITIALIZATION"
+      },
+      {
+        "start": 46,
+        "type": "REQUEST_EXECUTION"
+      },
+      {
+        "end": 50,
+        "type": "RESPONSE_HANDLING"
+      }
+    ],
+    "title": "logging_v2_generated_config_service_v2_delete_view_async.py"
   },
   {
     "canonical": true,
@@ -736,19 +1837,169 @@
         "fullName": "google.cloud.logging_v2.ConfigServiceV2Client",
         "shortName": "ConfigServiceV2Client"
       },
-      "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.delete_bucket",
+      "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.delete_view",
       "method": {
-        "fullName": "google.logging.v2.ConfigServiceV2.DeleteBucket",
+        "fullName": "google.logging.v2.ConfigServiceV2.DeleteView",
+        "service": {
+          "fullName": "google.logging.v2.ConfigServiceV2",
+          "shortName": "ConfigServiceV2"
+        },
+        "shortName": "DeleteView"
+      },
+      "parameters": [
+        {
+          "name": "request",
+          "type": "google.cloud.logging_v2.types.DeleteViewRequest"
+        },
+        {
+          "name": "retry",
+          "type": "google.api_core.retry.Retry"
+        },
+        {
+          "name": "timeout",
+          "type": "float"
+        },
+        {
+          "name": "metadata",
+          "type": "Sequence[Tuple[str, str]"
+        }
+      ],
+      "shortName": "delete_view"
+    },
+    "description": "Sample for DeleteView",
+    "file": "logging_v2_generated_config_service_v2_delete_view_sync.py",
+    "language": "PYTHON",
+    "origin": "API_DEFINITION",
+    "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteView_sync",
+    "segments": [
+      {
+        "end": 49,
+        "start": 27,
+        "type": "FULL"
+      },
+      {
+        "end": 49,
+        "start": 27,
+        "type": "SHORT"
+      },
+      {
+        "end": 40,
+        "start": 38,
+        "type": "CLIENT_INITIALIZATION"
+      },
+      {
+        "end": 45,
+        "start": 41,
+        "type": "REQUEST_INITIALIZATION"
+      },
+      {
+        "start": 46,
+        "type": "REQUEST_EXECUTION"
+      },
+      {
+        "end": 50,
+        "type": "RESPONSE_HANDLING"
+      }
+    ],
+    "title": "logging_v2_generated_config_service_v2_delete_view_sync.py"
+  },
+  {
+    "canonical": true,
+    "clientMethod": {
+      "async": true,
+      "client": {
+        "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient",
+        "shortName": "ConfigServiceV2AsyncClient"
+      },
+      "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.get_bucket",
+      "method": {
+        "fullName": "google.logging.v2.ConfigServiceV2.GetBucket",
+        "service": {
+          "fullName": "google.logging.v2.ConfigServiceV2",
+          "shortName": "ConfigServiceV2"
+        },
+        "shortName": "GetBucket"
+      },
+      "parameters": [
+        {
+          "name": "request",
+          "type": "google.cloud.logging_v2.types.GetBucketRequest"
+        },
+        {
+          "name": "retry",
+          "type": "google.api_core.retry.Retry"
+        },
+        {
+          "name": "timeout",
+          "type": "float"
+        },
+        {
+          "name": "metadata",
+          "type": "Sequence[Tuple[str, str]"
+        }
+      ],
+      "resultType": "google.cloud.logging_v2.types.LogBucket",
+      "shortName": "get_bucket"
+    },
+    "description": "Sample for GetBucket",
+    "file": "logging_v2_generated_config_service_v2_get_bucket_async.py",
+    "language": "PYTHON",
+    "origin": "API_DEFINITION",
+    "regionTag": "logging_v2_generated_ConfigServiceV2_GetBucket_async",
+    "segments": [
+      {
+        "end": 51,
+        "start": 27,
+        "type": "FULL"
+      },
+      {
+        "end": 51,
+        "start": 27,
+        "type": "SHORT"
+      },
+      {
+        "end": 40,
+        "start": 38,
+        "type": "CLIENT_INITIALIZATION"
+      },
+      {
+        "end": 45,
+        "start": 41,
+        "type": "REQUEST_INITIALIZATION"
+      },
+      {
+        "end": 48,
+        "start": 46,
+        "type": "REQUEST_EXECUTION"
+      },
+      {
+        "end": 52,
+        "start": 49,
+        "type": "RESPONSE_HANDLING"
+      }
+    ],
+    "title": "logging_v2_generated_config_service_v2_get_bucket_async.py"
+  },
+  {
+    "canonical": true,
+    "clientMethod": {
+      "client": {
+        "fullName": "google.cloud.logging_v2.ConfigServiceV2Client",
+        "shortName": "ConfigServiceV2Client"
+      },
+      "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.get_bucket",
+      "method": {
+        "fullName": "google.logging.v2.ConfigServiceV2.GetBucket",
         "service": {
           "fullName": "google.logging.v2.ConfigServiceV2",
           "shortName": "ConfigServiceV2"
         },
-        "shortName": "DeleteBucket"
+        "shortName": "GetBucket"
       },
       "parameters": [
         {
           "name": "request",
-          "type": "google.cloud.logging_v2.types.DeleteBucketRequest"
+          "type": "google.cloud.logging_v2.types.GetBucketRequest"
         },
         {
           "name": "retry",
@@ -763,21 +2014,22 @@
"type": "Sequence[Tuple[str, str]" } ], - "shortName": "delete_bucket" + "resultType": "google.cloud.logging_v2.types.LogBucket", + "shortName": "get_bucket" }, - "description": "Sample for DeleteBucket", - "file": "logging_v2_generated_config_service_v2_delete_bucket_sync.py", + "description": "Sample for GetBucket", + "file": "logging_v2_generated_config_service_v2_get_bucket_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteBucket_sync", + "regionTag": "logging_v2_generated_ConfigServiceV2_GetBucket_sync", "segments": [ { - "end": 49, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 51, "start": 27, "type": "SHORT" }, @@ -792,15 +2044,17 @@ "type": "REQUEST_INITIALIZATION" }, { + "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 50, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_delete_bucket_sync.py" + "title": "logging_v2_generated_config_service_v2_get_bucket_sync.py" }, { "canonical": true, @@ -810,23 +2064,19 @@ "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", "shortName": "ConfigServiceV2AsyncClient" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.delete_exclusion", + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.get_cmek_settings", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.DeleteExclusion", + "fullName": "google.logging.v2.ConfigServiceV2.GetCmekSettings", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "DeleteExclusion" + "shortName": "GetCmekSettings" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.DeleteExclusionRequest" - }, - { - "name": "name", - "type": "str" + "type": "google.cloud.logging_v2.types.GetCmekSettingsRequest" }, { "name": "retry", @@ -841,21 +2091,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "shortName": "delete_exclusion" + "resultType": "google.cloud.logging_v2.types.CmekSettings", + "shortName": "get_cmek_settings" }, - "description": "Sample for DeleteExclusion", - "file": "logging_v2_generated_config_service_v2_delete_exclusion_async.py", + "description": "Sample for GetCmekSettings", + "file": "logging_v2_generated_config_service_v2_get_cmek_settings_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteExclusion_async", + "regionTag": "logging_v2_generated_ConfigServiceV2_GetCmekSettings_async", "segments": [ { - "end": 49, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 51, "start": 27, "type": "SHORT" }, @@ -870,15 +2121,17 @@ "type": "REQUEST_INITIALIZATION" }, { + "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 50, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_delete_exclusion_async.py" + "title": "logging_v2_generated_config_service_v2_get_cmek_settings_async.py" }, { "canonical": true, @@ -887,23 +2140,19 @@ "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", "shortName": "ConfigServiceV2Client" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.delete_exclusion", + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.get_cmek_settings", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.DeleteExclusion", + "fullName": "google.logging.v2.ConfigServiceV2.GetCmekSettings", "service": { "fullName": 
"google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "DeleteExclusion" + "shortName": "GetCmekSettings" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.DeleteExclusionRequest" - }, - { - "name": "name", - "type": "str" + "type": "google.cloud.logging_v2.types.GetCmekSettingsRequest" }, { "name": "retry", @@ -918,21 +2167,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "shortName": "delete_exclusion" + "resultType": "google.cloud.logging_v2.types.CmekSettings", + "shortName": "get_cmek_settings" }, - "description": "Sample for DeleteExclusion", - "file": "logging_v2_generated_config_service_v2_delete_exclusion_sync.py", + "description": "Sample for GetCmekSettings", + "file": "logging_v2_generated_config_service_v2_get_cmek_settings_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteExclusion_sync", + "regionTag": "logging_v2_generated_ConfigServiceV2_GetCmekSettings_sync", "segments": [ { - "end": 49, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 51, "start": 27, "type": "SHORT" }, @@ -947,15 +2197,17 @@ "type": "REQUEST_INITIALIZATION" }, { + "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 50, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_delete_exclusion_sync.py" + "title": "logging_v2_generated_config_service_v2_get_cmek_settings_sync.py" }, { "canonical": true, @@ -965,22 +2217,22 @@ "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", "shortName": "ConfigServiceV2AsyncClient" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.delete_sink", + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.get_exclusion", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.DeleteSink", + "fullName": "google.logging.v2.ConfigServiceV2.GetExclusion", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "DeleteSink" + "shortName": "GetExclusion" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.DeleteSinkRequest" + "type": "google.cloud.logging_v2.types.GetExclusionRequest" }, { - "name": "sink_name", + "name": "name", "type": "str" }, { @@ -996,21 +2248,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "shortName": "delete_sink" + "resultType": "google.cloud.logging_v2.types.LogExclusion", + "shortName": "get_exclusion" }, - "description": "Sample for DeleteSink", - "file": "logging_v2_generated_config_service_v2_delete_sink_async.py", + "description": "Sample for GetExclusion", + "file": "logging_v2_generated_config_service_v2_get_exclusion_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteSink_async", + "regionTag": "logging_v2_generated_ConfigServiceV2_GetExclusion_async", "segments": [ { - "end": 49, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 51, "start": 27, "type": "SHORT" }, @@ -1025,15 +2278,17 @@ "type": "REQUEST_INITIALIZATION" }, { + "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 50, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_delete_sink_async.py" + "title": "logging_v2_generated_config_service_v2_get_exclusion_async.py" }, { "canonical": true, @@ -1042,22 +2297,22 @@ "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", 
"shortName": "ConfigServiceV2Client" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.delete_sink", + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.get_exclusion", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.DeleteSink", + "fullName": "google.logging.v2.ConfigServiceV2.GetExclusion", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "DeleteSink" + "shortName": "GetExclusion" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.DeleteSinkRequest" + "type": "google.cloud.logging_v2.types.GetExclusionRequest" }, { - "name": "sink_name", + "name": "name", "type": "str" }, { @@ -1073,21 +2328,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "shortName": "delete_sink" + "resultType": "google.cloud.logging_v2.types.LogExclusion", + "shortName": "get_exclusion" }, - "description": "Sample for DeleteSink", - "file": "logging_v2_generated_config_service_v2_delete_sink_sync.py", + "description": "Sample for GetExclusion", + "file": "logging_v2_generated_config_service_v2_get_exclusion_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteSink_sync", + "regionTag": "logging_v2_generated_ConfigServiceV2_GetExclusion_sync", "segments": [ { - "end": 49, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 51, "start": 27, "type": "SHORT" }, @@ -1102,15 +2358,17 @@ "type": "REQUEST_INITIALIZATION" }, { + "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 50, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_delete_sink_sync.py" + "title": "logging_v2_generated_config_service_v2_get_exclusion_sync.py" }, { "canonical": true, @@ -1120,19 +2378,23 @@ "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", "shortName": "ConfigServiceV2AsyncClient" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.delete_view", + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.get_link", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.DeleteView", + "fullName": "google.logging.v2.ConfigServiceV2.GetLink", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "DeleteView" + "shortName": "GetLink" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.DeleteViewRequest" + "type": "google.cloud.logging_v2.types.GetLinkRequest" + }, + { + "name": "name", + "type": "str" }, { "name": "retry", @@ -1147,21 +2409,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "shortName": "delete_view" + "resultType": "google.cloud.logging_v2.types.Link", + "shortName": "get_link" }, - "description": "Sample for DeleteView", - "file": "logging_v2_generated_config_service_v2_delete_view_async.py", + "description": "Sample for GetLink", + "file": "logging_v2_generated_config_service_v2_get_link_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteView_async", + "regionTag": "logging_v2_generated_ConfigServiceV2_GetLink_async", "segments": [ { - "end": 49, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 51, "start": 27, "type": "SHORT" }, @@ -1176,15 +2439,17 @@ "type": "REQUEST_INITIALIZATION" }, { + "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 50, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": 
"logging_v2_generated_config_service_v2_delete_view_async.py" + "title": "logging_v2_generated_config_service_v2_get_link_async.py" }, { "canonical": true, @@ -1193,19 +2458,23 @@ "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", "shortName": "ConfigServiceV2Client" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.delete_view", + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.get_link", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.DeleteView", + "fullName": "google.logging.v2.ConfigServiceV2.GetLink", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "DeleteView" + "shortName": "GetLink" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.DeleteViewRequest" + "type": "google.cloud.logging_v2.types.GetLinkRequest" + }, + { + "name": "name", + "type": "str" }, { "name": "retry", @@ -1220,21 +2489,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "shortName": "delete_view" + "resultType": "google.cloud.logging_v2.types.Link", + "shortName": "get_link" }, - "description": "Sample for DeleteView", - "file": "logging_v2_generated_config_service_v2_delete_view_sync.py", + "description": "Sample for GetLink", + "file": "logging_v2_generated_config_service_v2_get_link_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteView_sync", + "regionTag": "logging_v2_generated_ConfigServiceV2_GetLink_sync", "segments": [ { - "end": 49, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 51, "start": 27, "type": "SHORT" }, @@ -1249,15 +2519,17 @@ "type": "REQUEST_INITIALIZATION" }, { + "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 50, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_delete_view_sync.py" + "title": "logging_v2_generated_config_service_v2_get_link_sync.py" }, { "canonical": true, @@ -1267,19 +2539,23 @@ "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", "shortName": "ConfigServiceV2AsyncClient" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.get_bucket", + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.get_settings", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.GetBucket", + "fullName": "google.logging.v2.ConfigServiceV2.GetSettings", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "GetBucket" + "shortName": "GetSettings" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.GetBucketRequest" + "type": "google.cloud.logging_v2.types.GetSettingsRequest" + }, + { + "name": "name", + "type": "str" }, { "name": "retry", @@ -1294,14 +2570,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.logging_v2.types.LogBucket", - "shortName": "get_bucket" + "resultType": "google.cloud.logging_v2.types.Settings", + "shortName": "get_settings" }, - "description": "Sample for GetBucket", - "file": "logging_v2_generated_config_service_v2_get_bucket_async.py", + "description": "Sample for GetSettings", + "file": "logging_v2_generated_config_service_v2_get_settings_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_GetBucket_async", + "regionTag": "logging_v2_generated_ConfigServiceV2_GetSettings_async", "segments": [ { "end": 51, @@ -1334,7 +2610,7 @@ "type": 
"RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_get_bucket_async.py" + "title": "logging_v2_generated_config_service_v2_get_settings_async.py" }, { "canonical": true, @@ -1343,19 +2619,23 @@ "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", "shortName": "ConfigServiceV2Client" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.get_bucket", + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.get_settings", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.GetBucket", + "fullName": "google.logging.v2.ConfigServiceV2.GetSettings", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "GetBucket" + "shortName": "GetSettings" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.GetBucketRequest" + "type": "google.cloud.logging_v2.types.GetSettingsRequest" + }, + { + "name": "name", + "type": "str" }, { "name": "retry", @@ -1370,14 +2650,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.logging_v2.types.LogBucket", - "shortName": "get_bucket" + "resultType": "google.cloud.logging_v2.types.Settings", + "shortName": "get_settings" }, - "description": "Sample for GetBucket", - "file": "logging_v2_generated_config_service_v2_get_bucket_sync.py", + "description": "Sample for GetSettings", + "file": "logging_v2_generated_config_service_v2_get_settings_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_GetBucket_sync", + "regionTag": "logging_v2_generated_ConfigServiceV2_GetSettings_sync", "segments": [ { "end": 51, @@ -1410,7 +2690,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_get_bucket_sync.py" + "title": "logging_v2_generated_config_service_v2_get_settings_sync.py" }, { "canonical": true, @@ -1420,19 +2700,23 @@ "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", "shortName": "ConfigServiceV2AsyncClient" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.get_cmek_settings", + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.get_sink", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.GetCmekSettings", + "fullName": "google.logging.v2.ConfigServiceV2.GetSink", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "GetCmekSettings" + "shortName": "GetSink" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.GetCmekSettingsRequest" + "type": "google.cloud.logging_v2.types.GetSinkRequest" + }, + { + "name": "sink_name", + "type": "str" }, { "name": "retry", @@ -1447,14 +2731,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.logging_v2.types.CmekSettings", - "shortName": "get_cmek_settings" + "resultType": "google.cloud.logging_v2.types.LogSink", + "shortName": "get_sink" }, - "description": "Sample for GetCmekSettings", - "file": "logging_v2_generated_config_service_v2_get_cmek_settings_async.py", + "description": "Sample for GetSink", + "file": "logging_v2_generated_config_service_v2_get_sink_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_GetCmekSettings_async", + "regionTag": "logging_v2_generated_ConfigServiceV2_GetSink_async", "segments": [ { "end": 51, @@ -1487,7 +2771,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": 
"logging_v2_generated_config_service_v2_get_cmek_settings_async.py" + "title": "logging_v2_generated_config_service_v2_get_sink_async.py" }, { "canonical": true, @@ -1496,19 +2780,23 @@ "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", "shortName": "ConfigServiceV2Client" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.get_cmek_settings", + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.get_sink", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.GetCmekSettings", + "fullName": "google.logging.v2.ConfigServiceV2.GetSink", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "GetCmekSettings" + "shortName": "GetSink" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.GetCmekSettingsRequest" + "type": "google.cloud.logging_v2.types.GetSinkRequest" + }, + { + "name": "sink_name", + "type": "str" }, { "name": "retry", @@ -1523,14 +2811,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.logging_v2.types.CmekSettings", - "shortName": "get_cmek_settings" - }, - "description": "Sample for GetCmekSettings", - "file": "logging_v2_generated_config_service_v2_get_cmek_settings_sync.py", + "resultType": "google.cloud.logging_v2.types.LogSink", + "shortName": "get_sink" + }, + "description": "Sample for GetSink", + "file": "logging_v2_generated_config_service_v2_get_sink_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_GetCmekSettings_sync", + "regionTag": "logging_v2_generated_ConfigServiceV2_GetSink_sync", "segments": [ { "end": 51, @@ -1563,7 +2851,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_get_cmek_settings_sync.py" + "title": "logging_v2_generated_config_service_v2_get_sink_sync.py" }, { "canonical": true, @@ -1573,23 +2861,19 @@ "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", "shortName": "ConfigServiceV2AsyncClient" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.get_exclusion", + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.get_view", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.GetExclusion", + "fullName": "google.logging.v2.ConfigServiceV2.GetView", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "GetExclusion" + "shortName": "GetView" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.GetExclusionRequest" - }, - { - "name": "name", - "type": "str" + "type": "google.cloud.logging_v2.types.GetViewRequest" }, { "name": "retry", @@ -1604,14 +2888,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.logging_v2.types.LogExclusion", - "shortName": "get_exclusion" + "resultType": "google.cloud.logging_v2.types.LogView", + "shortName": "get_view" }, - "description": "Sample for GetExclusion", - "file": "logging_v2_generated_config_service_v2_get_exclusion_async.py", + "description": "Sample for GetView", + "file": "logging_v2_generated_config_service_v2_get_view_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_GetExclusion_async", + "regionTag": "logging_v2_generated_ConfigServiceV2_GetView_async", "segments": [ { "end": 51, @@ -1644,7 +2928,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_get_exclusion_async.py" + "title": 
"logging_v2_generated_config_service_v2_get_view_async.py" }, { "canonical": true, @@ -1653,23 +2937,19 @@ "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", "shortName": "ConfigServiceV2Client" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.get_exclusion", + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.get_view", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.GetExclusion", + "fullName": "google.logging.v2.ConfigServiceV2.GetView", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "GetExclusion" + "shortName": "GetView" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.GetExclusionRequest" - }, - { - "name": "name", - "type": "str" + "type": "google.cloud.logging_v2.types.GetViewRequest" }, { "name": "retry", @@ -1684,14 +2964,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.logging_v2.types.LogExclusion", - "shortName": "get_exclusion" + "resultType": "google.cloud.logging_v2.types.LogView", + "shortName": "get_view" }, - "description": "Sample for GetExclusion", - "file": "logging_v2_generated_config_service_v2_get_exclusion_sync.py", + "description": "Sample for GetView", + "file": "logging_v2_generated_config_service_v2_get_view_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_GetExclusion_sync", + "regionTag": "logging_v2_generated_ConfigServiceV2_GetView_sync", "segments": [ { "end": 51, @@ -1724,7 +3004,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_get_exclusion_sync.py" + "title": "logging_v2_generated_config_service_v2_get_view_sync.py" }, { "canonical": true, @@ -1734,22 +3014,22 @@ "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", "shortName": "ConfigServiceV2AsyncClient" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.get_sink", + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.list_buckets", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.GetSink", + "fullName": "google.logging.v2.ConfigServiceV2.ListBuckets", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "GetSink" + "shortName": "ListBuckets" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.GetSinkRequest" + "type": "google.cloud.logging_v2.types.ListBucketsRequest" }, { - "name": "sink_name", + "name": "parent", "type": "str" }, { @@ -1765,22 +3045,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.logging_v2.types.LogSink", - "shortName": "get_sink" + "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListBucketsAsyncPager", + "shortName": "list_buckets" }, - "description": "Sample for GetSink", - "file": "logging_v2_generated_config_service_v2_get_sink_async.py", + "description": "Sample for ListBuckets", + "file": "logging_v2_generated_config_service_v2_list_buckets_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_GetSink_async", + "regionTag": "logging_v2_generated_ConfigServiceV2_ListBuckets_async", "segments": [ { - "end": 51, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 52, "start": 27, "type": "SHORT" }, @@ -1800,12 +3080,12 @@ "type": "REQUEST_EXECUTION" }, { - "end": 52, + "end": 53, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": 
"logging_v2_generated_config_service_v2_get_sink_async.py" + "title": "logging_v2_generated_config_service_v2_list_buckets_async.py" }, { "canonical": true, @@ -1814,22 +3094,22 @@ "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", "shortName": "ConfigServiceV2Client" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.get_sink", + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.list_buckets", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.GetSink", + "fullName": "google.logging.v2.ConfigServiceV2.ListBuckets", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "GetSink" + "shortName": "ListBuckets" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.GetSinkRequest" + "type": "google.cloud.logging_v2.types.ListBucketsRequest" }, { - "name": "sink_name", + "name": "parent", "type": "str" }, { @@ -1845,22 +3125,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.logging_v2.types.LogSink", - "shortName": "get_sink" + "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListBucketsPager", + "shortName": "list_buckets" }, - "description": "Sample for GetSink", - "file": "logging_v2_generated_config_service_v2_get_sink_sync.py", + "description": "Sample for ListBuckets", + "file": "logging_v2_generated_config_service_v2_list_buckets_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_GetSink_sync", + "regionTag": "logging_v2_generated_ConfigServiceV2_ListBuckets_sync", "segments": [ { - "end": 51, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 52, "start": 27, "type": "SHORT" }, @@ -1880,12 +3160,12 @@ "type": "REQUEST_EXECUTION" }, { - "end": 52, + "end": 53, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_get_sink_sync.py" + "title": "logging_v2_generated_config_service_v2_list_buckets_sync.py" }, { "canonical": true, @@ -1895,19 +3175,23 @@ "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", "shortName": "ConfigServiceV2AsyncClient" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.get_view", + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.list_exclusions", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.GetView", + "fullName": "google.logging.v2.ConfigServiceV2.ListExclusions", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "GetView" + "shortName": "ListExclusions" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.GetViewRequest" + "type": "google.cloud.logging_v2.types.ListExclusionsRequest" + }, + { + "name": "parent", + "type": "str" }, { "name": "retry", @@ -1922,22 +3206,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.logging_v2.types.LogView", - "shortName": "get_view" + "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListExclusionsAsyncPager", + "shortName": "list_exclusions" }, - "description": "Sample for GetView", - "file": "logging_v2_generated_config_service_v2_get_view_async.py", + "description": "Sample for ListExclusions", + "file": "logging_v2_generated_config_service_v2_list_exclusions_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_GetView_async", + "regionTag": 
"logging_v2_generated_ConfigServiceV2_ListExclusions_async", "segments": [ { - "end": 51, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 52, "start": 27, "type": "SHORT" }, @@ -1957,12 +3241,12 @@ "type": "REQUEST_EXECUTION" }, { - "end": 52, + "end": 53, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_get_view_async.py" + "title": "logging_v2_generated_config_service_v2_list_exclusions_async.py" }, { "canonical": true, @@ -1971,19 +3255,23 @@ "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", "shortName": "ConfigServiceV2Client" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.get_view", + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.list_exclusions", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.GetView", + "fullName": "google.logging.v2.ConfigServiceV2.ListExclusions", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "GetView" + "shortName": "ListExclusions" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.GetViewRequest" + "type": "google.cloud.logging_v2.types.ListExclusionsRequest" + }, + { + "name": "parent", + "type": "str" }, { "name": "retry", @@ -1998,22 +3286,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.logging_v2.types.LogView", - "shortName": "get_view" + "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListExclusionsPager", + "shortName": "list_exclusions" }, - "description": "Sample for GetView", - "file": "logging_v2_generated_config_service_v2_get_view_sync.py", + "description": "Sample for ListExclusions", + "file": "logging_v2_generated_config_service_v2_list_exclusions_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_GetView_sync", + "regionTag": "logging_v2_generated_ConfigServiceV2_ListExclusions_sync", "segments": [ { - "end": 51, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 52, "start": 27, "type": "SHORT" }, @@ -2033,12 +3321,12 @@ "type": "REQUEST_EXECUTION" }, { - "end": 52, + "end": 53, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_get_view_sync.py" + "title": "logging_v2_generated_config_service_v2_list_exclusions_sync.py" }, { "canonical": true, @@ -2048,19 +3336,19 @@ "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", "shortName": "ConfigServiceV2AsyncClient" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.list_buckets", + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.list_links", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.ListBuckets", + "fullName": "google.logging.v2.ConfigServiceV2.ListLinks", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "ListBuckets" + "shortName": "ListLinks" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.ListBucketsRequest" + "type": "google.cloud.logging_v2.types.ListLinksRequest" }, { "name": "parent", @@ -2079,14 +3367,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListBucketsAsyncPager", - "shortName": "list_buckets" + "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListLinksAsyncPager", + "shortName": "list_links" }, - "description": "Sample for ListBuckets", - 
"file": "logging_v2_generated_config_service_v2_list_buckets_async.py", + "description": "Sample for ListLinks", + "file": "logging_v2_generated_config_service_v2_list_links_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_ListBuckets_async", + "regionTag": "logging_v2_generated_ConfigServiceV2_ListLinks_async", "segments": [ { "end": 52, @@ -2119,7 +3407,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_list_buckets_async.py" + "title": "logging_v2_generated_config_service_v2_list_links_async.py" }, { "canonical": true, @@ -2128,19 +3416,19 @@ "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", "shortName": "ConfigServiceV2Client" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.list_buckets", + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.list_links", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.ListBuckets", + "fullName": "google.logging.v2.ConfigServiceV2.ListLinks", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "ListBuckets" + "shortName": "ListLinks" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.ListBucketsRequest" + "type": "google.cloud.logging_v2.types.ListLinksRequest" }, { "name": "parent", @@ -2159,14 +3447,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListBucketsPager", - "shortName": "list_buckets" + "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListLinksPager", + "shortName": "list_links" }, - "description": "Sample for ListBuckets", - "file": "logging_v2_generated_config_service_v2_list_buckets_sync.py", + "description": "Sample for ListLinks", + "file": "logging_v2_generated_config_service_v2_list_links_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_ListBuckets_sync", + "regionTag": "logging_v2_generated_ConfigServiceV2_ListLinks_sync", "segments": [ { "end": 52, @@ -2199,7 +3487,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_list_buckets_sync.py" + "title": "logging_v2_generated_config_service_v2_list_links_sync.py" }, { "canonical": true, @@ -2209,19 +3497,19 @@ "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", "shortName": "ConfigServiceV2AsyncClient" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.list_exclusions", + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.list_sinks", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.ListExclusions", + "fullName": "google.logging.v2.ConfigServiceV2.ListSinks", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "ListExclusions" + "shortName": "ListSinks" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.ListExclusionsRequest" + "type": "google.cloud.logging_v2.types.ListSinksRequest" }, { "name": "parent", @@ -2240,14 +3528,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListExclusionsAsyncPager", - "shortName": "list_exclusions" + "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListSinksAsyncPager", + "shortName": "list_sinks" }, - "description": "Sample for ListExclusions", - "file": 
"logging_v2_generated_config_service_v2_list_exclusions_async.py", + "description": "Sample for ListSinks", + "file": "logging_v2_generated_config_service_v2_list_sinks_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_ListExclusions_async", + "regionTag": "logging_v2_generated_ConfigServiceV2_ListSinks_async", "segments": [ { "end": 52, @@ -2280,7 +3568,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_list_exclusions_async.py" + "title": "logging_v2_generated_config_service_v2_list_sinks_async.py" }, { "canonical": true, @@ -2289,19 +3577,19 @@ "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", "shortName": "ConfigServiceV2Client" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.list_exclusions", + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.list_sinks", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.ListExclusions", + "fullName": "google.logging.v2.ConfigServiceV2.ListSinks", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "ListExclusions" + "shortName": "ListSinks" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.ListExclusionsRequest" + "type": "google.cloud.logging_v2.types.ListSinksRequest" }, { "name": "parent", @@ -2320,14 +3608,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListExclusionsPager", - "shortName": "list_exclusions" + "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListSinksPager", + "shortName": "list_sinks" }, - "description": "Sample for ListExclusions", - "file": "logging_v2_generated_config_service_v2_list_exclusions_sync.py", + "description": "Sample for ListSinks", + "file": "logging_v2_generated_config_service_v2_list_sinks_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_ListExclusions_sync", + "regionTag": "logging_v2_generated_ConfigServiceV2_ListSinks_sync", "segments": [ { "end": 52, @@ -2360,7 +3648,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_list_exclusions_sync.py" + "title": "logging_v2_generated_config_service_v2_list_sinks_sync.py" }, { "canonical": true, @@ -2370,19 +3658,19 @@ "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", "shortName": "ConfigServiceV2AsyncClient" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.list_sinks", + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.list_views", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.ListSinks", + "fullName": "google.logging.v2.ConfigServiceV2.ListViews", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "ListSinks" + "shortName": "ListViews" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.ListSinksRequest" + "type": "google.cloud.logging_v2.types.ListViewsRequest" }, { "name": "parent", @@ -2401,14 +3689,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListSinksAsyncPager", - "shortName": "list_sinks" + "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListViewsAsyncPager", + "shortName": "list_views" }, - "description": "Sample for ListSinks", - "file": 
"logging_v2_generated_config_service_v2_list_sinks_async.py", + "description": "Sample for ListViews", + "file": "logging_v2_generated_config_service_v2_list_views_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_ListSinks_async", + "regionTag": "logging_v2_generated_ConfigServiceV2_ListViews_async", "segments": [ { "end": 52, @@ -2441,7 +3729,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_list_sinks_async.py" + "title": "logging_v2_generated_config_service_v2_list_views_async.py" }, { "canonical": true, @@ -2450,19 +3738,19 @@ "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", "shortName": "ConfigServiceV2Client" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.list_sinks", + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.list_views", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.ListSinks", + "fullName": "google.logging.v2.ConfigServiceV2.ListViews", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "ListSinks" + "shortName": "ListViews" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.ListSinksRequest" + "type": "google.cloud.logging_v2.types.ListViewsRequest" }, { "name": "parent", @@ -2481,14 +3769,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListSinksPager", - "shortName": "list_sinks" + "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListViewsPager", + "shortName": "list_views" }, - "description": "Sample for ListSinks", - "file": "logging_v2_generated_config_service_v2_list_sinks_sync.py", + "description": "Sample for ListViews", + "file": "logging_v2_generated_config_service_v2_list_views_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_ListSinks_sync", + "regionTag": "logging_v2_generated_ConfigServiceV2_ListViews_sync", "segments": [ { "end": 52, @@ -2521,7 +3809,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_list_sinks_sync.py" + "title": "logging_v2_generated_config_service_v2_list_views_sync.py" }, { "canonical": true, @@ -2531,23 +3819,19 @@ "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", "shortName": "ConfigServiceV2AsyncClient" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.list_views", + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.undelete_bucket", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.ListViews", + "fullName": "google.logging.v2.ConfigServiceV2.UndeleteBucket", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "ListViews" + "shortName": "UndeleteBucket" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.ListViewsRequest" - }, - { - "name": "parent", - "type": "str" + "type": "google.cloud.logging_v2.types.UndeleteBucketRequest" }, { "name": "retry", @@ -2562,22 +3846,21 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListViewsAsyncPager", - "shortName": "list_views" + "shortName": "undelete_bucket" }, - "description": "Sample for ListViews", - "file": "logging_v2_generated_config_service_v2_list_views_async.py", + "description": "Sample for UndeleteBucket", + "file": 
"logging_v2_generated_config_service_v2_undelete_bucket_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_ListViews_async", + "regionTag": "logging_v2_generated_ConfigServiceV2_UndeleteBucket_async", "segments": [ { - "end": 52, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 49, "start": 27, "type": "SHORT" }, @@ -2592,17 +3875,15 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 50, "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_list_views_async.py" + "title": "logging_v2_generated_config_service_v2_undelete_bucket_async.py" }, { "canonical": true, @@ -2611,23 +3892,19 @@ "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", "shortName": "ConfigServiceV2Client" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.list_views", + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.undelete_bucket", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.ListViews", + "fullName": "google.logging.v2.ConfigServiceV2.UndeleteBucket", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "ListViews" + "shortName": "UndeleteBucket" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.ListViewsRequest" - }, - { - "name": "parent", - "type": "str" + "type": "google.cloud.logging_v2.types.UndeleteBucketRequest" }, { "name": "retry", @@ -2642,22 +3919,21 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListViewsPager", - "shortName": "list_views" + "shortName": "undelete_bucket" }, - "description": "Sample for ListViews", - "file": "logging_v2_generated_config_service_v2_list_views_sync.py", + "description": "Sample for UndeleteBucket", + "file": "logging_v2_generated_config_service_v2_undelete_bucket_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_ListViews_sync", + "regionTag": "logging_v2_generated_ConfigServiceV2_UndeleteBucket_sync", "segments": [ { - "end": 52, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 49, "start": 27, "type": "SHORT" }, @@ -2672,17 +3948,15 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 50, "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_list_views_sync.py" + "title": "logging_v2_generated_config_service_v2_undelete_bucket_sync.py" }, { "canonical": true, @@ -2692,19 +3966,19 @@ "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", "shortName": "ConfigServiceV2AsyncClient" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.undelete_bucket", + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.update_bucket_async", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.UndeleteBucket", + "fullName": "google.logging.v2.ConfigServiceV2.UpdateBucketAsync", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "UndeleteBucket" + "shortName": "UpdateBucketAsync" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.UndeleteBucketRequest" + "type": "google.cloud.logging_v2.types.UpdateBucketRequest" }, { "name": "retry", @@ -2719,21 +3993,22 @@ "type": 
"Sequence[Tuple[str, str]" } ], - "shortName": "undelete_bucket" + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_bucket_async" }, - "description": "Sample for UndeleteBucket", - "file": "logging_v2_generated_config_service_v2_undelete_bucket_async.py", + "description": "Sample for UpdateBucketAsync", + "file": "logging_v2_generated_config_service_v2_update_bucket_async_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_UndeleteBucket_async", + "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateBucketAsync_async", "segments": [ { - "end": 49, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 55, "start": 27, "type": "SHORT" }, @@ -2748,15 +4023,17 @@ "type": "REQUEST_INITIALIZATION" }, { + "end": 52, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 50, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_undelete_bucket_async.py" + "title": "logging_v2_generated_config_service_v2_update_bucket_async_async.py" }, { "canonical": true, @@ -2765,19 +4042,19 @@ "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", "shortName": "ConfigServiceV2Client" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.undelete_bucket", + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.update_bucket_async", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.UndeleteBucket", + "fullName": "google.logging.v2.ConfigServiceV2.UpdateBucketAsync", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "UndeleteBucket" + "shortName": "UpdateBucketAsync" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.UndeleteBucketRequest" + "type": "google.cloud.logging_v2.types.UpdateBucketRequest" }, { "name": "retry", @@ -2792,21 +4069,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "shortName": "undelete_bucket" + "resultType": "google.api_core.operation.Operation", + "shortName": "update_bucket_async" }, - "description": "Sample for UndeleteBucket", - "file": "logging_v2_generated_config_service_v2_undelete_bucket_sync.py", + "description": "Sample for UpdateBucketAsync", + "file": "logging_v2_generated_config_service_v2_update_bucket_async_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_UndeleteBucket_sync", + "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateBucketAsync_sync", "segments": [ { - "end": 49, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 55, "start": 27, "type": "SHORT" }, @@ -2821,15 +4099,17 @@ "type": "REQUEST_INITIALIZATION" }, { + "end": 52, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 50, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_undelete_bucket_sync.py" + "title": "logging_v2_generated_config_service_v2_update_bucket_async_sync.py" }, { "canonical": true, @@ -3314,6 +4594,175 @@ ], "title": "logging_v2_generated_config_service_v2_update_exclusion_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", + "shortName": "ConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.update_settings", + "method": { + "fullName": "google.logging.v2.ConfigServiceV2.UpdateSettings", + "service": { 
+ "fullName": "google.logging.v2.ConfigServiceV2", + "shortName": "ConfigServiceV2" + }, + "shortName": "UpdateSettings" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.UpdateSettingsRequest" + }, + { + "name": "settings", + "type": "google.cloud.logging_v2.types.Settings" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.Settings", + "shortName": "update_settings" + }, + "description": "Sample for UpdateSettings", + "file": "logging_v2_generated_config_service_v2_update_settings_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateSettings_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_config_service_v2_update_settings_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", + "shortName": "ConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.update_settings", + "method": { + "fullName": "google.logging.v2.ConfigServiceV2.UpdateSettings", + "service": { + "fullName": "google.logging.v2.ConfigServiceV2", + "shortName": "ConfigServiceV2" + }, + "shortName": "UpdateSettings" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.UpdateSettingsRequest" + }, + { + "name": "settings", + "type": "google.cloud.logging_v2.types.Settings" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.Settings", + "shortName": "update_settings" + }, + "description": "Sample for UpdateSettings", + "file": "logging_v2_generated_config_service_v2_update_settings_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateSettings_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_config_service_v2_update_settings_sync.py" + }, { "canonical": true, "clientMethod": { diff --git a/tests/integration/goldens/logging/scripts/fixup_logging_v2_keywords.py b/tests/integration/goldens/logging/scripts/fixup_logging_v2_keywords.py index a8da3c59cb..1654590d10 100755 --- a/tests/integration/goldens/logging/scripts/fixup_logging_v2_keywords.py +++ 
b/tests/integration/goldens/logging/scripts/fixup_logging_v2_keywords.py @@ -39,13 +39,17 @@ def partition( class loggingCallTransformer(cst.CSTTransformer): CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'copy_log_entries': ('name', 'destination', 'filter', ), 'create_bucket': ('parent', 'bucket_id', 'bucket', ), + 'create_bucket_async': ('parent', 'bucket_id', 'bucket', ), 'create_exclusion': ('parent', 'exclusion', ), + 'create_link': ('parent', 'link', 'link_id', ), 'create_log_metric': ('parent', 'metric', ), 'create_sink': ('parent', 'sink', 'unique_writer_identity', ), 'create_view': ('parent', 'view_id', 'view', ), 'delete_bucket': ('name', ), 'delete_exclusion': ('name', ), + 'delete_link': ('name', ), 'delete_log': ('log_name', ), 'delete_log_metric': ('metric_name', ), 'delete_sink': ('sink_name', ), @@ -53,23 +57,28 @@ class loggingCallTransformer(cst.CSTTransformer): 'get_bucket': ('name', ), 'get_cmek_settings': ('name', ), 'get_exclusion': ('name', ), + 'get_link': ('name', ), 'get_log_metric': ('metric_name', ), + 'get_settings': ('name', ), 'get_sink': ('sink_name', ), 'get_view': ('name', ), 'list_buckets': ('parent', 'page_token', 'page_size', ), 'list_exclusions': ('parent', 'page_token', 'page_size', ), + 'list_links': ('parent', 'page_token', 'page_size', ), 'list_log_entries': ('resource_names', 'filter', 'order_by', 'page_size', 'page_token', ), 'list_log_metrics': ('parent', 'page_token', 'page_size', ), - 'list_logs': ('parent', 'page_size', 'page_token', 'resource_names', ), + 'list_logs': ('parent', 'resource_names', 'page_size', 'page_token', ), 'list_monitored_resource_descriptors': ('page_size', 'page_token', ), 'list_sinks': ('parent', 'page_token', 'page_size', ), 'list_views': ('parent', 'page_token', 'page_size', ), 'tail_log_entries': ('resource_names', 'filter', 'buffer_window', ), 'undelete_bucket': ('name', ), 'update_bucket': ('name', 'bucket', 'update_mask', ), + 'update_bucket_async': ('name', 'bucket', 'update_mask', ), 'update_cmek_settings': ('name', 'cmek_settings', 'update_mask', ), 'update_exclusion': ('name', 'exclusion', 'update_mask', ), 'update_log_metric': ('metric_name', 'metric', ), + 'update_settings': ('name', 'settings', 'update_mask', ), 'update_sink': ('sink_name', 'sink', 'unique_writer_identity', 'update_mask', ), 'update_view': ('name', 'view', 'update_mask', ), 'write_log_entries': ('entries', 'log_name', 'resource', 'labels', 'partial_success', 'dry_run', ), diff --git a/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py b/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py index ad8daf514f..a482c9cbf7 100755 --- a/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py +++ b/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py @@ -23,23 +23,20 @@ import grpc from grpc.experimental import aio -from collections.abc import Iterable -from google.protobuf import json_format -import json import math import pytest from proto.marshal.rules.dates import DurationRule, TimestampRule from proto.marshal.rules import wrappers -from requests import Response -from requests import Request, PreparedRequest -from requests.sessions import Session -from google.protobuf import json_format from google.api_core import client_options from google.api_core import exceptions as core_exceptions +from google.api_core import future from google.api_core import 
gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async +from google.api_core import operation +from google.api_core import operation_async # type: ignore +from google.api_core import operations_v1 from google.api_core import path_template from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError @@ -48,7 +45,9 @@ from google.cloud.logging_v2.services.config_service_v2 import pagers from google.cloud.logging_v2.services.config_service_v2 import transports from google.cloud.logging_v2.types import logging_config +from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account +from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore import google.auth @@ -83,7 +82,6 @@ def test__get_default_mtls_endpoint(): @pytest.mark.parametrize("client_class,transport_name", [ (ConfigServiceV2Client, "grpc"), (ConfigServiceV2AsyncClient, "grpc_asyncio"), - (ConfigServiceV2Client, "rest"), ]) def test_config_service_v2_client_from_service_account_info(client_class, transport_name): creds = ga_credentials.AnonymousCredentials() @@ -96,16 +94,12 @@ def test_config_service_v2_client_from_service_account_info(client_class, transp assert client.transport._host == ( 'logging.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://logging.googleapis.com' ) @pytest.mark.parametrize("transport_class,transport_name", [ (transports.ConfigServiceV2GrpcTransport, "grpc"), (transports.ConfigServiceV2GrpcAsyncIOTransport, "grpc_asyncio"), - (transports.ConfigServiceV2RestTransport, "rest"), ]) def test_config_service_v2_client_service_account_always_use_jwt(transport_class, transport_name): with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: @@ -122,7 +116,6 @@ def test_config_service_v2_client_service_account_always_use_jwt(transport_class @pytest.mark.parametrize("client_class,transport_name", [ (ConfigServiceV2Client, "grpc"), (ConfigServiceV2AsyncClient, "grpc_asyncio"), - (ConfigServiceV2Client, "rest"), ]) def test_config_service_v2_client_from_service_account_file(client_class, transport_name): creds = ga_credentials.AnonymousCredentials() @@ -138,9 +131,6 @@ def test_config_service_v2_client_from_service_account_file(client_class, transp assert client.transport._host == ( 'logging.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://logging.googleapis.com' ) @@ -148,7 +138,6 @@ def test_config_service_v2_client_get_transport_class(): transport = ConfigServiceV2Client.get_transport_class() available_transports = [ transports.ConfigServiceV2GrpcTransport, - transports.ConfigServiceV2RestTransport, ] assert transport in available_transports @@ -159,7 +148,6 @@ def test_config_service_v2_client_get_transport_class(): @pytest.mark.parametrize("client_class,transport_class,transport_name", [ (ConfigServiceV2Client, transports.ConfigServiceV2GrpcTransport, "grpc"), (ConfigServiceV2AsyncClient, transports.ConfigServiceV2GrpcAsyncIOTransport, "grpc_asyncio"), - (ConfigServiceV2Client, transports.ConfigServiceV2RestTransport, "rest"), ]) @mock.patch.object(ConfigServiceV2Client, "DEFAULT_ENDPOINT", modify_default_endpoint(ConfigServiceV2Client)) @mock.patch.object(ConfigServiceV2AsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ConfigServiceV2AsyncClient)) @@ 
-279,8 +267,6 @@ def test_config_service_v2_client_client_options(client_class, transport_class, (ConfigServiceV2AsyncClient, transports.ConfigServiceV2GrpcAsyncIOTransport, "grpc_asyncio", "true"), (ConfigServiceV2Client, transports.ConfigServiceV2GrpcTransport, "grpc", "false"), (ConfigServiceV2AsyncClient, transports.ConfigServiceV2GrpcAsyncIOTransport, "grpc_asyncio", "false"), - (ConfigServiceV2Client, transports.ConfigServiceV2RestTransport, "rest", "true"), - (ConfigServiceV2Client, transports.ConfigServiceV2RestTransport, "rest", "false"), ]) @mock.patch.object(ConfigServiceV2Client, "DEFAULT_ENDPOINT", modify_default_endpoint(ConfigServiceV2Client)) @mock.patch.object(ConfigServiceV2AsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ConfigServiceV2AsyncClient)) @@ -418,7 +404,6 @@ def test_config_service_v2_client_get_mtls_endpoint_and_cert_source(client_class @pytest.mark.parametrize("client_class,transport_class,transport_name", [ (ConfigServiceV2Client, transports.ConfigServiceV2GrpcTransport, "grpc"), (ConfigServiceV2AsyncClient, transports.ConfigServiceV2GrpcAsyncIOTransport, "grpc_asyncio"), - (ConfigServiceV2Client, transports.ConfigServiceV2RestTransport, "rest"), ]) def test_config_service_v2_client_client_options_scopes(client_class, transport_class, transport_name): # Check the case scopes are provided. @@ -443,7 +428,6 @@ def test_config_service_v2_client_client_options_scopes(client_class, transport_ @pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ (ConfigServiceV2Client, transports.ConfigServiceV2GrpcTransport, "grpc", grpc_helpers), (ConfigServiceV2AsyncClient, transports.ConfigServiceV2GrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), - (ConfigServiceV2Client, transports.ConfigServiceV2RestTransport, "rest", None), ]) def test_config_service_v2_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): # Check the case credentials file is provided. 
@@ -993,6 +977,8 @@ def test_get_bucket(request_type, transport: str = 'grpc'): retention_days=1512, locked=True, lifecycle_state=logging_config.LifecycleState.ACTIVE, + analytics_enabled=True, + restricted_fields=['restricted_fields_value'], ) response = client.get_bucket(request) @@ -1008,6 +994,8 @@ def test_get_bucket(request_type, transport: str = 'grpc'): assert response.retention_days == 1512 assert response.locked is True assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE + assert response.analytics_enabled is True + assert response.restricted_fields == ['restricted_fields_value'] def test_get_bucket_empty_call(): @@ -1049,6 +1037,8 @@ async def test_get_bucket_async(transport: str = 'grpc_asyncio', request_type=lo retention_days=1512, locked=True, lifecycle_state=logging_config.LifecycleState.ACTIVE, + analytics_enabled=True, + restricted_fields=['restricted_fields_value'], )) response = await client.get_bucket(request) @@ -1064,6 +1054,8 @@ async def test_get_bucket_async(transport: str = 'grpc_asyncio', request_type=lo assert response.retention_days == 1512 assert response.locked is True assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE + assert response.analytics_enabled is True + assert response.restricted_fields == ['restricted_fields_value'] @pytest.mark.asyncio @@ -1134,6 +1126,298 @@ async def test_get_bucket_field_headers_async(): ) in kw['metadata'] +@pytest.mark.parametrize("request_type", [ + logging_config.CreateBucketRequest, + dict, +]) +def test_create_bucket_async(request_type, transport: str = 'grpc'): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_bucket_async), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.create_bucket_async(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.CreateBucketRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_bucket_async_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_bucket_async), + '__call__') as call: + client.create_bucket_async() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.CreateBucketRequest() + +@pytest.mark.asyncio +async def test_create_bucket_async_async(transport: str = 'grpc_asyncio', request_type=logging_config.CreateBucketRequest): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_bucket_async), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.create_bucket_async(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.CreateBucketRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_bucket_async_async_from_dict(): + await test_create_bucket_async_async(request_type=dict) + + +def test_create_bucket_async_field_headers(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.CreateBucketRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_bucket_async), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_bucket_async(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_bucket_async_field_headers_async(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.CreateBucketRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_bucket_async), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.create_bucket_async(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + logging_config.UpdateBucketRequest, + dict, +]) +def test_update_bucket_async(request_type, transport: str = 'grpc'): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_bucket_async), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.update_bucket_async(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.UpdateBucketRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_bucket_async_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_bucket_async), + '__call__') as call: + client.update_bucket_async() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.UpdateBucketRequest() + +@pytest.mark.asyncio +async def test_update_bucket_async_async(transport: str = 'grpc_asyncio', request_type=logging_config.UpdateBucketRequest): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_bucket_async), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.update_bucket_async(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.UpdateBucketRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_bucket_async_async_from_dict(): + await test_update_bucket_async_async(request_type=dict) + + +def test_update_bucket_async_field_headers(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.UpdateBucketRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_bucket_async), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.update_bucket_async(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_bucket_async_field_headers_async(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = logging_config.UpdateBucketRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_bucket_async), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.update_bucket_async(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + @pytest.mark.parametrize("request_type", [ logging_config.CreateBucketRequest, dict, @@ -1159,6 +1443,8 @@ def test_create_bucket(request_type, transport: str = 'grpc'): retention_days=1512, locked=True, lifecycle_state=logging_config.LifecycleState.ACTIVE, + analytics_enabled=True, + restricted_fields=['restricted_fields_value'], ) response = client.create_bucket(request) @@ -1174,6 +1460,8 @@ def test_create_bucket(request_type, transport: str = 'grpc'): assert response.retention_days == 1512 assert response.locked is True assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE + assert response.analytics_enabled is True + assert response.restricted_fields == ['restricted_fields_value'] def test_create_bucket_empty_call(): @@ -1215,6 +1503,8 @@ async def test_create_bucket_async(transport: str = 'grpc_asyncio', request_type retention_days=1512, locked=True, lifecycle_state=logging_config.LifecycleState.ACTIVE, + analytics_enabled=True, + restricted_fields=['restricted_fields_value'], )) response = await client.create_bucket(request) @@ -1230,6 +1520,8 @@ async def test_create_bucket_async(transport: str = 'grpc_asyncio', request_type assert response.retention_days == 1512 assert response.locked is True assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE + assert response.analytics_enabled is True + assert response.restricted_fields == ['restricted_fields_value'] @pytest.mark.asyncio @@ -1325,6 +1617,8 @@ def test_update_bucket(request_type, transport: str = 'grpc'): retention_days=1512, locked=True, lifecycle_state=logging_config.LifecycleState.ACTIVE, + analytics_enabled=True, + restricted_fields=['restricted_fields_value'], ) response = client.update_bucket(request) @@ -1340,6 +1634,8 @@ def test_update_bucket(request_type, transport: str = 'grpc'): assert response.retention_days == 1512 assert response.locked is True assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE + assert response.analytics_enabled is True + assert response.restricted_fields == ['restricted_fields_value'] def test_update_bucket_empty_call(): @@ -1381,6 +1677,8 @@ async def test_update_bucket_async(transport: str = 'grpc_asyncio', request_type retention_days=1512, locked=True, lifecycle_state=logging_config.LifecycleState.ACTIVE, + analytics_enabled=True, + restricted_fields=['restricted_fields_value'], )) response = await client.update_bucket(request) @@ -1396,6 +1694,8 @@ async def test_update_bucket_async(transport: str = 'grpc_asyncio', request_type assert response.retention_days == 1512 assert response.locked is True assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE + assert response.analytics_enabled is True + assert response.restricted_fields == ['restricted_fields_value'] @pytest.mark.asyncio @@ -4257,10 +4557,10 @@ async 
def test_delete_sink_flattened_error_async(): @pytest.mark.parametrize("request_type", [ - logging_config.ListExclusionsRequest, + logging_config.CreateLinkRequest, dict, ]) -def test_list_exclusions(request_type, transport: str = 'grpc'): +def test_create_link(request_type, transport: str = 'grpc'): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -4272,25 +4572,22 @@ def test_list_exclusions(request_type, transport: str = 'grpc'): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_exclusions), + type(client.transport.create_link), '__call__') as call: # Designate an appropriate return value for the call. - call.return_value = logging_config.ListExclusionsResponse( - next_page_token='next_page_token_value', - ) - response = client.list_exclusions(request) + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.create_link(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListExclusionsRequest() + assert args[0] == logging_config.CreateLinkRequest() # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListExclusionsPager) - assert response.next_page_token == 'next_page_token_value' + assert isinstance(response, future.Future) -def test_list_exclusions_empty_call(): +def test_create_link_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( @@ -4300,15 +4597,15 @@ def test_list_exclusions_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_exclusions), + type(client.transport.create_link), '__call__') as call: - client.list_exclusions() + client.create_link() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListExclusionsRequest() + assert args[0] == logging_config.CreateLinkRequest() @pytest.mark.asyncio -async def test_list_exclusions_async(transport: str = 'grpc_asyncio', request_type=logging_config.ListExclusionsRequest): +async def test_create_link_async(transport: str = 'grpc_asyncio', request_type=logging_config.CreateLinkRequest): client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -4320,46 +4617,45 @@ async def test_list_exclusions_async(transport: str = 'grpc_asyncio', request_ty # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_exclusions), + type(client.transport.create_link), '__call__') as call: # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListExclusionsResponse( - next_page_token='next_page_token_value', - )) - response = await client.list_exclusions(request) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.create_link(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListExclusionsRequest() + assert args[0] == logging_config.CreateLinkRequest() # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListExclusionsAsyncPager) - assert response.next_page_token == 'next_page_token_value' + assert isinstance(response, future.Future) @pytest.mark.asyncio -async def test_list_exclusions_async_from_dict(): - await test_list_exclusions_async(request_type=dict) +async def test_create_link_async_from_dict(): + await test_create_link_async(request_type=dict) -def test_list_exclusions_field_headers(): +def test_create_link_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = logging_config.ListExclusionsRequest() + request = logging_config.CreateLinkRequest() request.parent = 'parent_value' # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_exclusions), + type(client.transport.create_link), '__call__') as call: - call.return_value = logging_config.ListExclusionsResponse() - client.list_exclusions(request) + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_link(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -4375,23 +4671,23 @@ def test_list_exclusions_field_headers(): @pytest.mark.asyncio -async def test_list_exclusions_field_headers_async(): +async def test_create_link_field_headers_async(): client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = logging_config.ListExclusionsRequest() + request = logging_config.CreateLinkRequest() request.parent = 'parent_value' # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_exclusions), + type(client.transport.create_link), '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListExclusionsResponse()) - await client.list_exclusions(request) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.create_link(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -4406,21 +4702,23 @@ async def test_list_exclusions_field_headers_async(): ) in kw['metadata'] -def test_list_exclusions_flattened(): +def test_create_link_flattened(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_exclusions), + type(client.transport.create_link), '__call__') as call: # Designate an appropriate return value for the call. - call.return_value = logging_config.ListExclusionsResponse() + call.return_value = operations_pb2.Operation(name='operations/op') # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- client.list_exclusions( + client.create_link( parent='parent_value', + link=logging_config.Link(name='name_value'), + link_id='link_id_value', ) # Establish that the underlying call was made with the expected @@ -4430,9 +4728,15 @@ def test_list_exclusions_flattened(): arg = args[0].parent mock_val = 'parent_value' assert arg == mock_val + arg = args[0].link + mock_val = logging_config.Link(name='name_value') + assert arg == mock_val + arg = args[0].link_id + mock_val = 'link_id_value' + assert arg == mock_val -def test_list_exclusions_flattened_error(): +def test_create_link_flattened_error(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), ) @@ -4440,29 +4744,35 @@ def test_list_exclusions_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_exclusions( - logging_config.ListExclusionsRequest(), + client.create_link( + logging_config.CreateLinkRequest(), parent='parent_value', + link=logging_config.Link(name='name_value'), + link_id='link_id_value', ) @pytest.mark.asyncio -async def test_list_exclusions_flattened_async(): +async def test_create_link_flattened_async(): client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_exclusions), + type(client.transport.create_link), '__call__') as call: # Designate an appropriate return value for the call. - call.return_value = logging_config.ListExclusionsResponse() + call.return_value = operations_pb2.Operation(name='operations/op') - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListExclusionsResponse()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_exclusions( + response = await client.create_link( parent='parent_value', + link=logging_config.Link(name='name_value'), + link_id='link_id_value', ) # Establish that the underlying call was made with the expected @@ -4472,9 +4782,15 @@ async def test_list_exclusions_flattened_async(): arg = args[0].parent mock_val = 'parent_value' assert arg == mock_val + arg = args[0].link + mock_val = logging_config.Link(name='name_value') + assert arg == mock_val + arg = args[0].link_id + mock_val = 'link_id_value' + assert arg == mock_val @pytest.mark.asyncio -async def test_list_exclusions_flattened_error_async(): +async def test_create_link_flattened_error_async(): client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -4482,209 +4798,19 @@ async def test_list_exclusions_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.list_exclusions( - logging_config.ListExclusionsRequest(), + await client.create_link( + logging_config.CreateLinkRequest(), parent='parent_value', + link=logging_config.Link(name='name_value'), + link_id='link_id_value', ) -def test_list_exclusions_pager(transport_name: str = "grpc"): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_exclusions), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - logging_config.ListExclusionsResponse( - exclusions=[ - logging_config.LogExclusion(), - logging_config.LogExclusion(), - logging_config.LogExclusion(), - ], - next_page_token='abc', - ), - logging_config.ListExclusionsResponse( - exclusions=[], - next_page_token='def', - ), - logging_config.ListExclusionsResponse( - exclusions=[ - logging_config.LogExclusion(), - ], - next_page_token='ghi', - ), - logging_config.ListExclusionsResponse( - exclusions=[ - logging_config.LogExclusion(), - logging_config.LogExclusion(), - ], - ), - RuntimeError, - ) - - metadata = () - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_exclusions(request={}) - - assert pager._metadata == metadata - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, logging_config.LogExclusion) - for i in results) -def test_list_exclusions_pages(transport_name: str = "grpc"): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_exclusions), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - logging_config.ListExclusionsResponse( - exclusions=[ - logging_config.LogExclusion(), - logging_config.LogExclusion(), - logging_config.LogExclusion(), - ], - next_page_token='abc', - ), - logging_config.ListExclusionsResponse( - exclusions=[], - next_page_token='def', - ), - logging_config.ListExclusionsResponse( - exclusions=[ - logging_config.LogExclusion(), - ], - next_page_token='ghi', - ), - logging_config.ListExclusionsResponse( - exclusions=[ - logging_config.LogExclusion(), - logging_config.LogExclusion(), - ], - ), - RuntimeError, - ) - pages = list(client.list_exclusions(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_exclusions_async_pager(): - client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_exclusions), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - logging_config.ListExclusionsResponse( - exclusions=[ - logging_config.LogExclusion(), - logging_config.LogExclusion(), - logging_config.LogExclusion(), - ], - next_page_token='abc', - ), - logging_config.ListExclusionsResponse( - exclusions=[], - next_page_token='def', - ), - logging_config.ListExclusionsResponse( - exclusions=[ - logging_config.LogExclusion(), - ], - next_page_token='ghi', - ), - logging_config.ListExclusionsResponse( - exclusions=[ - logging_config.LogExclusion(), - logging_config.LogExclusion(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_exclusions(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, logging_config.LogExclusion) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_exclusions_async_pages(): - client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_exclusions), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - logging_config.ListExclusionsResponse( - exclusions=[ - logging_config.LogExclusion(), - logging_config.LogExclusion(), - logging_config.LogExclusion(), - ], - next_page_token='abc', - ), - logging_config.ListExclusionsResponse( - exclusions=[], - next_page_token='def', - ), - logging_config.ListExclusionsResponse( - exclusions=[ - logging_config.LogExclusion(), - ], - next_page_token='ghi', - ), - logging_config.ListExclusionsResponse( - exclusions=[ - logging_config.LogExclusion(), - logging_config.LogExclusion(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_exclusions(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - @pytest.mark.parametrize("request_type", [ - logging_config.GetExclusionRequest, + logging_config.DeleteLinkRequest, dict, ]) -def test_get_exclusion(request_type, transport: str = 'grpc'): +def test_delete_link(request_type, transport: str = 'grpc'): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -4696,31 +4822,22 @@ def test_get_exclusion(request_type, transport: str = 'grpc'): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_exclusion), + type(client.transport.delete_link), '__call__') as call: # Designate an appropriate return value for the call. - call.return_value = logging_config.LogExclusion( - name='name_value', - description='description_value', - filter='filter_value', - disabled=True, - ) - response = client.get_exclusion(request) + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.delete_link(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetExclusionRequest() + assert args[0] == logging_config.DeleteLinkRequest() # Establish that the response is the type that we expect. - assert isinstance(response, logging_config.LogExclusion) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.filter == 'filter_value' - assert response.disabled is True + assert isinstance(response, future.Future) -def test_get_exclusion_empty_call(): +def test_delete_link_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( @@ -4730,15 +4847,15 @@ def test_get_exclusion_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_exclusion), + type(client.transport.delete_link), '__call__') as call: - client.get_exclusion() + client.delete_link() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetExclusionRequest() + assert args[0] == logging_config.DeleteLinkRequest() @pytest.mark.asyncio -async def test_get_exclusion_async(transport: str = 'grpc_asyncio', request_type=logging_config.GetExclusionRequest): +async def test_delete_link_async(transport: str = 'grpc_asyncio', request_type=logging_config.DeleteLinkRequest): client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -4750,52 +4867,45 @@ async def test_get_exclusion_async(transport: str = 'grpc_asyncio', request_type # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_exclusion), + type(client.transport.delete_link), '__call__') as call: # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion( - name='name_value', - description='description_value', - filter='filter_value', - disabled=True, - )) - response = await client.get_exclusion(request) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.delete_link(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetExclusionRequest() + assert args[0] == logging_config.DeleteLinkRequest() # Establish that the response is the type that we expect. - assert isinstance(response, logging_config.LogExclusion) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.filter == 'filter_value' - assert response.disabled is True + assert isinstance(response, future.Future) @pytest.mark.asyncio -async def test_get_exclusion_async_from_dict(): - await test_get_exclusion_async(request_type=dict) +async def test_delete_link_async_from_dict(): + await test_delete_link_async(request_type=dict) -def test_get_exclusion_field_headers(): +def test_delete_link_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
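# The field-header tests that follow all pin down one contract: any request
# field that is part of the HTTP/1.1 URI must be echoed into the gRPC
# metadata as an ``x-goog-request-params`` entry. A minimal sketch of that
# check, assuming `client` and `logging_config` as imported in this test
# module (the helper name `sketch_routing_header` is illustrative only):
from unittest import mock
from google.longrunning import operations_pb2

def sketch_routing_header(client):
    request = logging_config.DeleteLinkRequest(name='name_value')
    with mock.patch.object(
            type(client.transport.delete_link), '__call__') as call:
        call.return_value = operations_pb2.Operation(name='operations/op')
        client.delete_link(request)
    # Metadata is passed as a keyword argument on the stub call.
    _, _, kw = call.mock_calls[0]
    assert ('x-goog-request-params', 'name=name_value') in kw['metadata']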
- request = logging_config.GetExclusionRequest() + request = logging_config.DeleteLinkRequest() request.name = 'name_value' # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_exclusion), + type(client.transport.delete_link), '__call__') as call: - call.return_value = logging_config.LogExclusion() - client.get_exclusion(request) + call.return_value = operations_pb2.Operation(name='operations/op') + client.delete_link(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -4811,23 +4921,23 @@ def test_get_exclusion_field_headers(): @pytest.mark.asyncio -async def test_get_exclusion_field_headers_async(): +async def test_delete_link_field_headers_async(): client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = logging_config.GetExclusionRequest() + request = logging_config.DeleteLinkRequest() request.name = 'name_value' # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_exclusion), + type(client.transport.delete_link), '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion()) - await client.get_exclusion(request) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.delete_link(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -4842,20 +4952,20 @@ async def test_get_exclusion_field_headers_async(): ) in kw['metadata'] -def test_get_exclusion_flattened(): +def test_delete_link_flattened(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_exclusion), + type(client.transport.delete_link), '__call__') as call: # Designate an appropriate return value for the call. - call.return_value = logging_config.LogExclusion() + call.return_value = operations_pb2.Operation(name='operations/op') # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_exclusion( + client.delete_link( name='name_value', ) @@ -4868,7 +4978,7 @@ def test_get_exclusion_flattened(): assert arg == mock_val -def test_get_exclusion_flattened_error(): +def test_delete_link_flattened_error(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), ) @@ -4876,28 +4986,30 @@ def test_get_exclusion_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_exclusion( - logging_config.GetExclusionRequest(), + client.delete_link( + logging_config.DeleteLinkRequest(), name='name_value', ) @pytest.mark.asyncio -async def test_get_exclusion_flattened_async(): +async def test_delete_link_flattened_async(): client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_exclusion), + type(client.transport.delete_link), '__call__') as call: # Designate an appropriate return value for the call. 
- call.return_value = logging_config.LogExclusion() + call.return_value = operations_pb2.Operation(name='operations/op') - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_exclusion( + response = await client.delete_link( name='name_value', ) @@ -4910,7 +5022,7 @@ async def test_get_exclusion_flattened_async(): assert arg == mock_val @pytest.mark.asyncio -async def test_get_exclusion_flattened_error_async(): +async def test_delete_link_flattened_error_async(): client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -4918,17 +5030,17 @@ async def test_get_exclusion_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.get_exclusion( - logging_config.GetExclusionRequest(), + await client.delete_link( + logging_config.DeleteLinkRequest(), name='name_value', ) @pytest.mark.parametrize("request_type", [ - logging_config.CreateExclusionRequest, + logging_config.ListLinksRequest, dict, ]) -def test_create_exclusion(request_type, transport: str = 'grpc'): +def test_list_links(request_type, transport: str = 'grpc'): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -4940,31 +5052,25 @@ def test_create_exclusion(request_type, transport: str = 'grpc'): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_exclusion), + type(client.transport.list_links), '__call__') as call: # Designate an appropriate return value for the call. - call.return_value = logging_config.LogExclusion( - name='name_value', - description='description_value', - filter='filter_value', - disabled=True, + call.return_value = logging_config.ListLinksResponse( + next_page_token='next_page_token_value', ) - response = client.create_exclusion(request) + response = client.list_links(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateExclusionRequest() + assert args[0] == logging_config.ListLinksRequest() # Establish that the response is the type that we expect. - assert isinstance(response, logging_config.LogExclusion) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.filter == 'filter_value' - assert response.disabled is True + assert isinstance(response, pagers.ListLinksPager) + assert response.next_page_token == 'next_page_token_value' -def test_create_exclusion_empty_call(): +def test_list_links_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( @@ -4974,15 +5080,15 @@ def test_create_exclusion_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.create_exclusion), + type(client.transport.list_links), '__call__') as call: - client.create_exclusion() + client.list_links() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateExclusionRequest() + assert args[0] == logging_config.ListLinksRequest() @pytest.mark.asyncio -async def test_create_exclusion_async(transport: str = 'grpc_asyncio', request_type=logging_config.CreateExclusionRequest): +async def test_list_links_async(transport: str = 'grpc_asyncio', request_type=logging_config.ListLinksRequest): client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -4994,52 +5100,46 @@ async def test_create_exclusion_async(transport: str = 'grpc_asyncio', request_t # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_exclusion), + type(client.transport.list_links), '__call__') as call: # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion( - name='name_value', - description='description_value', - filter='filter_value', - disabled=True, + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListLinksResponse( + next_page_token='next_page_token_value', )) - response = await client.create_exclusion(request) + response = await client.list_links(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateExclusionRequest() + assert args[0] == logging_config.ListLinksRequest() # Establish that the response is the type that we expect. - assert isinstance(response, logging_config.LogExclusion) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.filter == 'filter_value' - assert response.disabled is True + assert isinstance(response, pagers.ListLinksAsyncPager) + assert response.next_page_token == 'next_page_token_value' @pytest.mark.asyncio -async def test_create_exclusion_async_from_dict(): - await test_create_exclusion_async(request_type=dict) +async def test_list_links_async_from_dict(): + await test_list_links_async(request_type=dict) -def test_create_exclusion_field_headers(): +def test_list_links_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = logging_config.CreateExclusionRequest() + request = logging_config.ListLinksRequest() request.parent = 'parent_value' # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_exclusion), + type(client.transport.list_links), '__call__') as call: - call.return_value = logging_config.LogExclusion() - client.create_exclusion(request) + call.return_value = logging_config.ListLinksResponse() + client.list_links(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -5055,23 +5155,23 @@ def test_create_exclusion_field_headers(): @pytest.mark.asyncio -async def test_create_exclusion_field_headers_async(): +async def test_list_links_field_headers_async(): client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = logging_config.CreateExclusionRequest() + request = logging_config.ListLinksRequest() request.parent = 'parent_value' # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_exclusion), + type(client.transport.list_links), '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion()) - await client.create_exclusion(request) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListLinksResponse()) + await client.list_links(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -5086,22 +5186,21 @@ async def test_create_exclusion_field_headers_async(): ) in kw['metadata'] -def test_create_exclusion_flattened(): +def test_list_links_flattened(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_exclusion), + type(client.transport.list_links), '__call__') as call: # Designate an appropriate return value for the call. - call.return_value = logging_config.LogExclusion() + call.return_value = logging_config.ListLinksResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.create_exclusion( + client.list_links( parent='parent_value', - exclusion=logging_config.LogExclusion(name='name_value'), ) # Establish that the underlying call was made with the expected @@ -5111,12 +5210,9 @@ def test_create_exclusion_flattened(): arg = args[0].parent mock_val = 'parent_value' assert arg == mock_val - arg = args[0].exclusion - mock_val = logging_config.LogExclusion(name='name_value') - assert arg == mock_val -def test_create_exclusion_flattened_error(): +def test_list_links_flattened_error(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), ) @@ -5124,31 +5220,29 @@ def test_create_exclusion_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_exclusion( - logging_config.CreateExclusionRequest(), + client.list_links( + logging_config.ListLinksRequest(), parent='parent_value', - exclusion=logging_config.LogExclusion(name='name_value'), ) @pytest.mark.asyncio -async def test_create_exclusion_flattened_async(): +async def test_list_links_flattened_async(): client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_exclusion), + type(client.transport.list_links), '__call__') as call: # Designate an appropriate return value for the call. 
- call.return_value = logging_config.LogExclusion() + call.return_value = logging_config.ListLinksResponse() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListLinksResponse()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.create_exclusion( + response = await client.list_links( parent='parent_value', - exclusion=logging_config.LogExclusion(name='name_value'), ) # Establish that the underlying call was made with the expected @@ -5158,12 +5252,9 @@ async def test_create_exclusion_flattened_async(): arg = args[0].parent mock_val = 'parent_value' assert arg == mock_val - arg = args[0].exclusion - mock_val = logging_config.LogExclusion(name='name_value') - assert arg == mock_val @pytest.mark.asyncio -async def test_create_exclusion_flattened_error_async(): +async def test_list_links_flattened_error_async(): client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -5171,18 +5262,209 @@ async def test_create_exclusion_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.create_exclusion( - logging_config.CreateExclusionRequest(), + await client.list_links( + logging_config.ListLinksRequest(), parent='parent_value', - exclusion=logging_config.LogExclusion(name='name_value'), ) +def test_list_links_pager(transport_name: str = "grpc"): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_links), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + logging_config.ListLinksResponse( + links=[ + logging_config.Link(), + logging_config.Link(), + logging_config.Link(), + ], + next_page_token='abc', + ), + logging_config.ListLinksResponse( + links=[], + next_page_token='def', + ), + logging_config.ListLinksResponse( + links=[ + logging_config.Link(), + ], + next_page_token='ghi', + ), + logging_config.ListLinksResponse( + links=[ + logging_config.Link(), + logging_config.Link(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client.list_links(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, logging_config.Link) + for i in results) +def test_list_links_pages(transport_name: str = "grpc"): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_links), + '__call__') as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + logging_config.ListLinksResponse( + links=[ + logging_config.Link(), + logging_config.Link(), + logging_config.Link(), + ], + next_page_token='abc', + ), + logging_config.ListLinksResponse( + links=[], + next_page_token='def', + ), + logging_config.ListLinksResponse( + links=[ + logging_config.Link(), + ], + next_page_token='ghi', + ), + logging_config.ListLinksResponse( + links=[ + logging_config.Link(), + logging_config.Link(), + ], + ), + RuntimeError, + ) + pages = list(client.list_links(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_links_async_pager(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_links), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + logging_config.ListLinksResponse( + links=[ + logging_config.Link(), + logging_config.Link(), + logging_config.Link(), + ], + next_page_token='abc', + ), + logging_config.ListLinksResponse( + links=[], + next_page_token='def', + ), + logging_config.ListLinksResponse( + links=[ + logging_config.Link(), + ], + next_page_token='ghi', + ), + logging_config.ListLinksResponse( + links=[ + logging_config.Link(), + logging_config.Link(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_links(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, logging_config.Link) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_links_async_pages(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_links), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
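# Pager tests simulate server-side pagination through ``side_effect``: each
# stub call consumes the next canned response, and the trailing RuntimeError
# is a sentinel that fails loudly if the pager requests a page beyond the
# final one (the response with no next_page_token). A minimal sketch,
# assuming `client` and `logging_config` as in this test module (the helper
# name `sketch_pager` is illustrative only):
from unittest import mock

def sketch_pager(client):
    with mock.patch.object(
            type(client.transport.list_links), '__call__') as call:
        call.side_effect = (
            logging_config.ListLinksResponse(
                links=[logging_config.Link()],
                next_page_token='abc',
            ),
            logging_config.ListLinksResponse(links=[]),  # final page
            RuntimeError,  # only hit if the pager over-fetches
        )
        # Iterating the pager transparently walks both pages.
        results = list(client.list_links(request={}))
    assert len(results) == 1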
+ call.side_effect = ( + logging_config.ListLinksResponse( + links=[ + logging_config.Link(), + logging_config.Link(), + logging_config.Link(), + ], + next_page_token='abc', + ), + logging_config.ListLinksResponse( + links=[], + next_page_token='def', + ), + logging_config.ListLinksResponse( + links=[ + logging_config.Link(), + ], + next_page_token='ghi', + ), + logging_config.ListLinksResponse( + links=[ + logging_config.Link(), + logging_config.Link(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_links(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + @pytest.mark.parametrize("request_type", [ - logging_config.UpdateExclusionRequest, + logging_config.GetLinkRequest, dict, ]) -def test_update_exclusion(request_type, transport: str = 'grpc'): +def test_get_link(request_type, transport: str = 'grpc'): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5194,31 +5476,29 @@ def test_update_exclusion(request_type, transport: str = 'grpc'): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_exclusion), + type(client.transport.get_link), '__call__') as call: # Designate an appropriate return value for the call. - call.return_value = logging_config.LogExclusion( + call.return_value = logging_config.Link( name='name_value', description='description_value', - filter='filter_value', - disabled=True, + lifecycle_state=logging_config.LifecycleState.ACTIVE, ) - response = client.update_exclusion(request) + response = client.get_link(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateExclusionRequest() + assert args[0] == logging_config.GetLinkRequest() # Establish that the response is the type that we expect. - assert isinstance(response, logging_config.LogExclusion) + assert isinstance(response, logging_config.Link) assert response.name == 'name_value' assert response.description == 'description_value' - assert response.filter == 'filter_value' - assert response.disabled is True + assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE -def test_update_exclusion_empty_call(): +def test_get_link_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( @@ -5228,15 +5508,15 @@ def test_update_exclusion_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.update_exclusion), + type(client.transport.get_link), '__call__') as call: - client.update_exclusion() + client.get_link() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateExclusionRequest() + assert args[0] == logging_config.GetLinkRequest() @pytest.mark.asyncio -async def test_update_exclusion_async(transport: str = 'grpc_asyncio', request_type=logging_config.UpdateExclusionRequest): +async def test_get_link_async(transport: str = 'grpc_asyncio', request_type=logging_config.GetLinkRequest): client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5248,52 +5528,50 @@ async def test_update_exclusion_async(transport: str = 'grpc_asyncio', request_t # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_exclusion), + type(client.transport.get_link), '__call__') as call: # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion( + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.Link( name='name_value', description='description_value', - filter='filter_value', - disabled=True, + lifecycle_state=logging_config.LifecycleState.ACTIVE, )) - response = await client.update_exclusion(request) + response = await client.get_link(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateExclusionRequest() + assert args[0] == logging_config.GetLinkRequest() # Establish that the response is the type that we expect. - assert isinstance(response, logging_config.LogExclusion) + assert isinstance(response, logging_config.Link) assert response.name == 'name_value' assert response.description == 'description_value' - assert response.filter == 'filter_value' - assert response.disabled is True + assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE @pytest.mark.asyncio -async def test_update_exclusion_async_from_dict(): - await test_update_exclusion_async(request_type=dict) +async def test_get_link_async_from_dict(): + await test_get_link_async(request_type=dict) -def test_update_exclusion_field_headers(): +def test_get_link_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = logging_config.UpdateExclusionRequest() + request = logging_config.GetLinkRequest() request.name = 'name_value' # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_exclusion), + type(client.transport.get_link), '__call__') as call: - call.return_value = logging_config.LogExclusion() - client.update_exclusion(request) + call.return_value = logging_config.Link() + client.get_link(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -5309,23 +5587,23 @@ def test_update_exclusion_field_headers(): @pytest.mark.asyncio -async def test_update_exclusion_field_headers_async(): +async def test_get_link_field_headers_async(): client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. 
Set these to a non-empty value. - request = logging_config.UpdateExclusionRequest() + request = logging_config.GetLinkRequest() request.name = 'name_value' # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_exclusion), + type(client.transport.get_link), '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion()) - await client.update_exclusion(request) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.Link()) + await client.get_link(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -5340,23 +5618,21 @@ async def test_update_exclusion_field_headers_async(): ) in kw['metadata'] -def test_update_exclusion_flattened(): +def test_get_link_flattened(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_exclusion), + type(client.transport.get_link), '__call__') as call: # Designate an appropriate return value for the call. - call.return_value = logging_config.LogExclusion() + call.return_value = logging_config.Link() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.update_exclusion( + client.get_link( name='name_value', - exclusion=logging_config.LogExclusion(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), ) # Establish that the underlying call was made with the expected @@ -5366,15 +5642,9 @@ def test_update_exclusion_flattened(): arg = args[0].name mock_val = 'name_value' assert arg == mock_val - arg = args[0].exclusion - mock_val = logging_config.LogExclusion(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val -def test_update_exclusion_flattened_error(): +def test_get_link_flattened_error(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), ) @@ -5382,33 +5652,29 @@ def test_update_exclusion_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.update_exclusion( - logging_config.UpdateExclusionRequest(), + client.get_link( + logging_config.GetLinkRequest(), name='name_value', - exclusion=logging_config.LogExclusion(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), ) @pytest.mark.asyncio -async def test_update_exclusion_flattened_async(): +async def test_get_link_flattened_async(): client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_exclusion), + type(client.transport.get_link), '__call__') as call: # Designate an appropriate return value for the call. - call.return_value = logging_config.LogExclusion() + call.return_value = logging_config.Link() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.Link()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
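# Flattened-field tests pin down two behaviors: keyword arguments are copied
# onto the generated request message, and mixing an explicit request object
# with flattened keywords raises ValueError. A minimal sketch using the
# synchronous client for brevity, assuming `client` and `logging_config` as
# in this test module (the helper name `sketch_flattened` is illustrative
# only):
import pytest
from unittest import mock

def sketch_flattened(client):
    with mock.patch.object(
            type(client.transport.get_link), '__call__') as call:
        call.return_value = logging_config.Link()
        client.get_link(name='name_value')
        _, args, _ = call.mock_calls[0]
        assert args[0].name == 'name_value'  # copied onto the request
    with pytest.raises(ValueError):
        # A request object plus flattened fields is ambiguous and rejected.
        client.get_link(logging_config.GetLinkRequest(), name='name_value')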
- response = await client.update_exclusion( + response = await client.get_link( name='name_value', - exclusion=logging_config.LogExclusion(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), ) # Establish that the underlying call was made with the expected @@ -5418,15 +5684,9 @@ async def test_update_exclusion_flattened_async(): arg = args[0].name mock_val = 'name_value' assert arg == mock_val - arg = args[0].exclusion - mock_val = logging_config.LogExclusion(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val @pytest.mark.asyncio -async def test_update_exclusion_flattened_error_async(): +async def test_get_link_flattened_error_async(): client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -5434,19 +5694,17 @@ async def test_update_exclusion_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.update_exclusion( - logging_config.UpdateExclusionRequest(), + await client.get_link( + logging_config.GetLinkRequest(), name='name_value', - exclusion=logging_config.LogExclusion(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), ) @pytest.mark.parametrize("request_type", [ - logging_config.DeleteExclusionRequest, + logging_config.ListExclusionsRequest, dict, ]) -def test_delete_exclusion(request_type, transport: str = 'grpc'): +def test_list_exclusions(request_type, transport: str = 'grpc'): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5458,22 +5716,25 @@ def test_delete_exclusion(request_type, transport: str = 'grpc'): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_exclusion), + type(client.transport.list_exclusions), '__call__') as call: # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_exclusion(request) + call.return_value = logging_config.ListExclusionsResponse( + next_page_token='next_page_token_value', + ) + response = client.list_exclusions(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteExclusionRequest() + assert args[0] == logging_config.ListExclusionsRequest() # Establish that the response is the type that we expect. - assert response is None + assert isinstance(response, pagers.ListExclusionsPager) + assert response.next_page_token == 'next_page_token_value' -def test_delete_exclusion_empty_call(): +def test_list_exclusions_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( @@ -5483,15 +5744,15 @@ def test_delete_exclusion_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.delete_exclusion), + type(client.transport.list_exclusions), '__call__') as call: - client.delete_exclusion() + client.list_exclusions() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteExclusionRequest() + assert args[0] == logging_config.ListExclusionsRequest() @pytest.mark.asyncio -async def test_delete_exclusion_async(transport: str = 'grpc_asyncio', request_type=logging_config.DeleteExclusionRequest): +async def test_list_exclusions_async(transport: str = 'grpc_asyncio', request_type=logging_config.ListExclusionsRequest): client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5503,43 +5764,46 @@ async def test_delete_exclusion_async(transport: str = 'grpc_asyncio', request_t # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_exclusion), + type(client.transport.list_exclusions), '__call__') as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_exclusion(request) + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListExclusionsResponse( + next_page_token='next_page_token_value', + )) + response = await client.list_exclusions(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteExclusionRequest() + assert args[0] == logging_config.ListExclusionsRequest() # Establish that the response is the type that we expect. - assert response is None + assert isinstance(response, pagers.ListExclusionsAsyncPager) + assert response.next_page_token == 'next_page_token_value' @pytest.mark.asyncio -async def test_delete_exclusion_async_from_dict(): - await test_delete_exclusion_async(request_type=dict) +async def test_list_exclusions_async_from_dict(): + await test_list_exclusions_async(request_type=dict) -def test_delete_exclusion_field_headers(): +def test_list_exclusions_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = logging_config.DeleteExclusionRequest() + request = logging_config.ListExclusionsRequest() - request.name = 'name_value' + request.parent = 'parent_value' # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_exclusion), + type(client.transport.list_exclusions), '__call__') as call: - call.return_value = None - client.delete_exclusion(request) + call.return_value = logging_config.ListExclusionsResponse() + client.list_exclusions(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -5550,28 +5814,28 @@ def test_delete_exclusion_field_headers(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'name=name_value', + 'parent=parent_value', ) in kw['metadata'] @pytest.mark.asyncio -async def test_delete_exclusion_field_headers_async(): +async def test_list_exclusions_field_headers_async(): client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = logging_config.DeleteExclusionRequest() + request = logging_config.ListExclusionsRequest() - request.name = 'name_value' + request.parent = 'parent_value' # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_exclusion), + type(client.transport.list_exclusions), '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_exclusion(request) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListExclusionsResponse()) + await client.list_exclusions(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -5582,37 +5846,37 @@ async def test_delete_exclusion_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'name=name_value', + 'parent=parent_value', ) in kw['metadata'] -def test_delete_exclusion_flattened(): +def test_list_exclusions_flattened(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_exclusion), + type(client.transport.list_exclusions), '__call__') as call: # Designate an appropriate return value for the call. - call.return_value = None + call.return_value = logging_config.ListExclusionsResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.delete_exclusion( - name='name_value', + client.list_exclusions( + parent='parent_value', ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' + arg = args[0].parent + mock_val = 'parent_value' assert arg == mock_val -def test_delete_exclusion_flattened_error(): +def test_list_exclusions_flattened_error(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), ) @@ -5620,41 +5884,41 @@ def test_delete_exclusion_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.delete_exclusion( - logging_config.DeleteExclusionRequest(), - name='name_value', + client.list_exclusions( + logging_config.ListExclusionsRequest(), + parent='parent_value', ) @pytest.mark.asyncio -async def test_delete_exclusion_flattened_async(): +async def test_list_exclusions_flattened_async(): client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_exclusion), + type(client.transport.list_exclusions), '__call__') as call: # Designate an appropriate return value for the call. - call.return_value = None + call.return_value = logging_config.ListExclusionsResponse() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListExclusionsResponse()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.delete_exclusion( - name='name_value', + response = await client.list_exclusions( + parent='parent_value', ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' + arg = args[0].parent + mock_val = 'parent_value' assert arg == mock_val @pytest.mark.asyncio -async def test_delete_exclusion_flattened_error_async(): +async def test_list_exclusions_flattened_error_async(): client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -5662,69 +5926,263 @@ async def test_delete_exclusion_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.delete_exclusion( - logging_config.DeleteExclusionRequest(), - name='name_value', + await client.list_exclusions( + logging_config.ListExclusionsRequest(), + parent='parent_value', ) -@pytest.mark.parametrize("request_type", [ - logging_config.GetCmekSettingsRequest, - dict, -]) -def test_get_cmek_settings(request_type, transport: str = 'grpc'): +def test_list_exclusions_pager(transport_name: str = "grpc"): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_cmek_settings), + type(client.transport.list_exclusions), '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = logging_config.CmekSettings( - name='name_value', - kms_key_name='kms_key_name_value', - service_account_id='service_account_id_value', + # Set the response to a series of pages. + call.side_effect = ( + logging_config.ListExclusionsResponse( + exclusions=[ + logging_config.LogExclusion(), + logging_config.LogExclusion(), + logging_config.LogExclusion(), + ], + next_page_token='abc', + ), + logging_config.ListExclusionsResponse( + exclusions=[], + next_page_token='def', + ), + logging_config.ListExclusionsResponse( + exclusions=[ + logging_config.LogExclusion(), + ], + next_page_token='ghi', + ), + logging_config.ListExclusionsResponse( + exclusions=[ + logging_config.LogExclusion(), + logging_config.LogExclusion(), + ], + ), + RuntimeError, ) - response = client.get_cmek_settings(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetCmekSettingsRequest() - # Establish that the response is the type that we expect. - assert isinstance(response, logging_config.CmekSettings) - assert response.name == 'name_value' - assert response.kms_key_name == 'kms_key_name_value' - assert response.service_account_id == 'service_account_id_value' + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client.list_exclusions(request={}) + assert pager._metadata == metadata -def test_get_cmek_settings_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
+ results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, logging_config.LogExclusion) + for i in results) +def test_list_exclusions_pages(transport_name: str = "grpc"): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_cmek_settings), + type(client.transport.list_exclusions), '__call__') as call: - client.get_cmek_settings() + # Set the response to a series of pages. + call.side_effect = ( + logging_config.ListExclusionsResponse( + exclusions=[ + logging_config.LogExclusion(), + logging_config.LogExclusion(), + logging_config.LogExclusion(), + ], + next_page_token='abc', + ), + logging_config.ListExclusionsResponse( + exclusions=[], + next_page_token='def', + ), + logging_config.ListExclusionsResponse( + exclusions=[ + logging_config.LogExclusion(), + ], + next_page_token='ghi', + ), + logging_config.ListExclusionsResponse( + exclusions=[ + logging_config.LogExclusion(), + logging_config.LogExclusion(), + ], + ), + RuntimeError, + ) + pages = list(client.list_exclusions(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_exclusions_async_pager(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_exclusions), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + logging_config.ListExclusionsResponse( + exclusions=[ + logging_config.LogExclusion(), + logging_config.LogExclusion(), + logging_config.LogExclusion(), + ], + next_page_token='abc', + ), + logging_config.ListExclusionsResponse( + exclusions=[], + next_page_token='def', + ), + logging_config.ListExclusionsResponse( + exclusions=[ + logging_config.LogExclusion(), + ], + next_page_token='ghi', + ), + logging_config.ListExclusionsResponse( + exclusions=[ + logging_config.LogExclusion(), + logging_config.LogExclusion(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_exclusions(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, logging_config.LogExclusion) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_exclusions_async_pages(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_exclusions), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + logging_config.ListExclusionsResponse( + exclusions=[ + logging_config.LogExclusion(), + logging_config.LogExclusion(), + logging_config.LogExclusion(), + ], + next_page_token='abc', + ), + logging_config.ListExclusionsResponse( + exclusions=[], + next_page_token='def', + ), + logging_config.ListExclusionsResponse( + exclusions=[ + logging_config.LogExclusion(), + ], + next_page_token='ghi', + ), + logging_config.ListExclusionsResponse( + exclusions=[ + logging_config.LogExclusion(), + logging_config.LogExclusion(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_exclusions(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + logging_config.GetExclusionRequest, + dict, +]) +def test_get_exclusion(request_type, transport: str = 'grpc'): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_exclusion), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.LogExclusion( + name='name_value', + description='description_value', + filter='filter_value', + disabled=True, + ) + response = client.get_exclusion(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.GetExclusionRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, logging_config.LogExclusion) + assert response.name == 'name_value' + assert response.description == 'description_value' + assert response.filter == 'filter_value' + assert response.disabled is True + + +def test_get_exclusion_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_exclusion), + '__call__') as call: + client.get_exclusion() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetCmekSettingsRequest() + assert args[0] == logging_config.GetExclusionRequest() @pytest.mark.asyncio -async def test_get_cmek_settings_async(transport: str = 'grpc_asyncio', request_type=logging_config.GetCmekSettingsRequest): +async def test_get_exclusion_async(transport: str = 'grpc_asyncio', request_type=logging_config.GetExclusionRequest): client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5736,50 +6194,52 @@ async def test_get_cmek_settings_async(transport: str = 'grpc_asyncio', request_ # Mock the actual call within the gRPC stub, and fake the request. 
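An aside on what the pager tests above exercise: list_exclusions returns a pager that lazily re-issues the RPC, feeding each response's next_page_token back into the next request until the service returns an empty token. A minimal, self-contained sketch of that handshake (the names here are illustrative, not the generated pager implementation):

from dataclasses import dataclass, field
from typing import Callable, Iterator, List


@dataclass
class FakePage:
    items: List[str] = field(default_factory=list)
    next_page_token: str = ''


def iterate_pages(fetch: Callable[[str], FakePage]) -> Iterator[FakePage]:
    """Yield pages until the service returns an empty next_page_token."""
    token = ''
    while True:
        page = fetch(token)
        yield page
        token = page.next_page_token
        if not token:  # '' marks the final page, as asserted in the tests above
            break


# Usage: tokens 'abc', 'def', '' mirror the mocked side_effect series.
_pages = iter([FakePage(['a'], 'abc'), FakePage([], 'def'), FakePage(['b'], '')])
pages = list(iterate_pages(lambda tok: next(_pages)))
assert [p.next_page_token for p in pages] == ['abc', 'def', '']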
     with mock.patch.object(
-            type(client.transport.get_cmek_settings),
+            type(client.transport.get_exclusion),
             '__call__') as call:
         # Designate an appropriate return value for the call.
-        call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.CmekSettings(
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion(
             name='name_value',
-            kms_key_name='kms_key_name_value',
-            service_account_id='service_account_id_value',
+            description='description_value',
+            filter='filter_value',
+            disabled=True,
         ))
-        response = await client.get_cmek_settings(request)
+        response = await client.get_exclusion(request)

         # Establish that the underlying gRPC stub method was called.
         assert len(call.mock_calls)
         _, args, _ = call.mock_calls[0]
-        assert args[0] == logging_config.GetCmekSettingsRequest()
+        assert args[0] == logging_config.GetExclusionRequest()

     # Establish that the response is the type that we expect.
-    assert isinstance(response, logging_config.CmekSettings)
+    assert isinstance(response, logging_config.LogExclusion)
     assert response.name == 'name_value'
-    assert response.kms_key_name == 'kms_key_name_value'
-    assert response.service_account_id == 'service_account_id_value'
+    assert response.description == 'description_value'
+    assert response.filter == 'filter_value'
+    assert response.disabled is True

 @pytest.mark.asyncio
-async def test_get_cmek_settings_async_from_dict():
-    await test_get_cmek_settings_async(request_type=dict)
+async def test_get_exclusion_async_from_dict():
+    await test_get_exclusion_async(request_type=dict)

-def test_get_cmek_settings_field_headers():
+def test_get_exclusion_field_headers():
     client = ConfigServiceV2Client(
         credentials=ga_credentials.AnonymousCredentials(),
     )

     # Any value that is part of the HTTP/1.1 URI should be sent as
     # a field header. Set these to a non-empty value.
-    request = logging_config.GetCmekSettingsRequest()
+    request = logging_config.GetExclusionRequest()

     request.name = 'name_value'

     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
-            type(client.transport.get_cmek_settings),
+            type(client.transport.get_exclusion),
             '__call__') as call:
-        call.return_value = logging_config.CmekSettings()
-        client.get_cmek_settings(request)
+        call.return_value = logging_config.LogExclusion()
+        client.get_exclusion(request)

     # Establish that the underlying gRPC stub method was called.
     assert len(call.mock_calls) == 1
@@ -5795,23 +6255,23 @@ def test_get_cmek_settings_field_headers():

 @pytest.mark.asyncio
-async def test_get_cmek_settings_field_headers_async():
+async def test_get_exclusion_field_headers_async():
     client = ConfigServiceV2AsyncClient(
         credentials=ga_credentials.AnonymousCredentials(),
     )

     # Any value that is part of the HTTP/1.1 URI should be sent as
     # a field header. Set these to a non-empty value.
-    request = logging_config.GetCmekSettingsRequest()
+    request = logging_config.GetExclusionRequest()

     request.name = 'name_value'

     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
-            type(client.transport.get_cmek_settings),
+            type(client.transport.get_exclusion),
             '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.CmekSettings())
-        await client.get_cmek_settings(request)
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion())
+        await client.get_exclusion(request)

     # Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) @@ -5826,45 +6286,129 @@ async def test_get_cmek_settings_field_headers_async(): ) in kw['metadata'] -@pytest.mark.parametrize("request_type", [ - logging_config.UpdateCmekSettingsRequest, - dict, -]) -def test_update_cmek_settings(request_type, transport: str = 'grpc'): +def test_get_exclusion_flattened(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_cmek_settings), + type(client.transport.get_exclusion), '__call__') as call: # Designate an appropriate return value for the call. - call.return_value = logging_config.CmekSettings( + call.return_value = logging_config.LogExclusion() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_exclusion( name='name_value', - kms_key_name='kms_key_name_value', - service_account_id='service_account_id_value', ) - response = client.update_cmek_settings(request) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_get_exclusion_flattened_error(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_exclusion( + logging_config.GetExclusionRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_get_exclusion_flattened_async(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_exclusion), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.LogExclusion() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_exclusion( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_get_exclusion_flattened_error_async(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_exclusion( + logging_config.GetExclusionRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + logging_config.CreateExclusionRequest, + dict, +]) +def test_create_exclusion(request_type, transport: str = 'grpc'): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_exclusion), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.LogExclusion( + name='name_value', + description='description_value', + filter='filter_value', + disabled=True, + ) + response = client.create_exclusion(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateCmekSettingsRequest() + assert args[0] == logging_config.CreateExclusionRequest() # Establish that the response is the type that we expect. - assert isinstance(response, logging_config.CmekSettings) + assert isinstance(response, logging_config.LogExclusion) assert response.name == 'name_value' - assert response.kms_key_name == 'kms_key_name_value' - assert response.service_account_id == 'service_account_id_value' + assert response.description == 'description_value' + assert response.filter == 'filter_value' + assert response.disabled is True -def test_update_cmek_settings_empty_call(): +def test_create_exclusion_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( @@ -5874,15 +6418,15 @@ def test_update_cmek_settings_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_cmek_settings), + type(client.transport.create_exclusion), '__call__') as call: - client.update_cmek_settings() + client.create_exclusion() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateCmekSettingsRequest() + assert args[0] == logging_config.CreateExclusionRequest() @pytest.mark.asyncio -async def test_update_cmek_settings_async(transport: str = 'grpc_asyncio', request_type=logging_config.UpdateCmekSettingsRequest): +async def test_create_exclusion_async(transport: str = 'grpc_asyncio', request_type=logging_config.CreateExclusionRequest): client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5894,50 +6438,52 @@ async def test_update_cmek_settings_async(transport: str = 'grpc_asyncio', reque # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_cmek_settings), + type(client.transport.create_exclusion), '__call__') as call: # Designate an appropriate return value for the call. 
-        call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.CmekSettings(
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion(
             name='name_value',
-            kms_key_name='kms_key_name_value',
-            service_account_id='service_account_id_value',
+            description='description_value',
+            filter='filter_value',
+            disabled=True,
         ))
-        response = await client.update_cmek_settings(request)
+        response = await client.create_exclusion(request)

         # Establish that the underlying gRPC stub method was called.
         assert len(call.mock_calls)
         _, args, _ = call.mock_calls[0]
-        assert args[0] == logging_config.UpdateCmekSettingsRequest()
+        assert args[0] == logging_config.CreateExclusionRequest()

     # Establish that the response is the type that we expect.
-    assert isinstance(response, logging_config.CmekSettings)
+    assert isinstance(response, logging_config.LogExclusion)
     assert response.name == 'name_value'
-    assert response.kms_key_name == 'kms_key_name_value'
-    assert response.service_account_id == 'service_account_id_value'
+    assert response.description == 'description_value'
+    assert response.filter == 'filter_value'
+    assert response.disabled is True

 @pytest.mark.asyncio
-async def test_update_cmek_settings_async_from_dict():
-    await test_update_cmek_settings_async(request_type=dict)
+async def test_create_exclusion_async_from_dict():
+    await test_create_exclusion_async(request_type=dict)

-def test_update_cmek_settings_field_headers():
+def test_create_exclusion_field_headers():
     client = ConfigServiceV2Client(
         credentials=ga_credentials.AnonymousCredentials(),
     )

     # Any value that is part of the HTTP/1.1 URI should be sent as
     # a field header. Set these to a non-empty value.
-    request = logging_config.UpdateCmekSettingsRequest()
+    request = logging_config.CreateExclusionRequest()

-    request.name = 'name_value'
+    request.parent = 'parent_value'

     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
-            type(client.transport.update_cmek_settings),
+            type(client.transport.create_exclusion),
             '__call__') as call:
-        call.return_value = logging_config.CmekSettings()
-        client.update_cmek_settings(request)
+        call.return_value = logging_config.LogExclusion()
+        client.create_exclusion(request)

     # Establish that the underlying gRPC stub method was called.
     assert len(call.mock_calls) == 1
@@ -5948,28 +6494,28 @@ def test_update_cmek_settings_field_headers():
     _, _, kw = call.mock_calls[0]
     assert (
         'x-goog-request-params',
-        'name=name_value',
+        'parent=parent_value',
     ) in kw['metadata']

 @pytest.mark.asyncio
-async def test_update_cmek_settings_field_headers_async():
+async def test_create_exclusion_field_headers_async():
     client = ConfigServiceV2AsyncClient(
         credentials=ga_credentials.AnonymousCredentials(),
     )

     # Any value that is part of the HTTP/1.1 URI should be sent as
     # a field header. Set these to a non-empty value.
-    request = logging_config.UpdateCmekSettingsRequest()
+    request = logging_config.CreateExclusionRequest()

-    request.name = 'name_value'
+    request.parent = 'parent_value'

     # Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object( - type(client.transport.update_cmek_settings), + type(client.transport.create_exclusion), '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.CmekSettings()) - await client.update_cmek_settings(request) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion()) + await client.create_exclusion(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -5980,5923 +6526,2677 @@ async def test_update_cmek_settings_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'name=name_value', + 'parent=parent_value', ) in kw['metadata'] -@pytest.mark.parametrize("request_type", [ - logging_config.ListBucketsRequest, - dict, -]) -def test_list_buckets_rest(request_type): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2/locations/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = logging_config.ListBucketsResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = logging_config.ListBucketsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.list_buckets(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListBucketsPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_buckets_rest_required_fields(request_type=logging_config.ListBucketsRequest): - transport_class = transports.ConfigServiceV2RestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_buckets._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_buckets._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("page_size", "page_token", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. 
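For context on the x-goog-request-params assertions above: the generated clients fold the routing fields set on the request into a single URL-encoded metadata entry via google-api-core's routing_header helper, which is the same helper the tests invoke. A quick demonstration (a sketch that assumes google-api-core is installed):

from google.api_core.gapic_v1 import routing_header

# One metadata tuple carries all routing params for the request.
entry = routing_header.to_grpc_metadata([('parent', 'parent_value')])
assert entry == ('x-goog-request-params', 'parent=parent_value')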
- return_value = logging_config.ListBucketsResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = logging_config.ListBucketsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.list_buckets(request) - - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_list_buckets_rest_unset_required_fields(): - transport = transports.ConfigServiceV2RestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.list_buckets._get_unset_required_fields({}) - assert set(unset_fields) == (set(("pageSize", "pageToken", )) & set(("parent", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_buckets_rest_interceptors(null_interceptor): - transport = transports.ConfigServiceV2RestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ConfigServiceV2RestInterceptor(), - ) - client = ConfigServiceV2Client(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.ConfigServiceV2RestInterceptor, "post_list_buckets") as post, \ - mock.patch.object(transports.ConfigServiceV2RestInterceptor, "pre_list_buckets") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = logging_config.ListBucketsRequest.pb(logging_config.ListBucketsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = logging_config.ListBucketsResponse.to_json(logging_config.ListBucketsResponse()) - - request = logging_config.ListBucketsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = logging_config.ListBucketsResponse() - - client.list_buckets(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_list_buckets_rest_bad_request(transport: str = 'rest', request_type=logging_config.ListBucketsRequest): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2/locations/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
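A note on the transcode mocking in the removed REST tests: "transcoding" maps request fields into the HTTP rule's URI template, and whatever does not bind to the path travels as query parameters. A toy sketch of that split (illustrative only; the real clients rely on google.api_core.path_template.transcode, whose signature differs):

def transcode(http_rule: str, request: dict) -> dict:
    # Fields named in the URI template bind to the path...
    path_fields = {k: v for k, v in request.items() if '{' + k + '}' in http_rule}
    uri = http_rule.format(**path_fields)
    # ...and everything else is sent as query parameters.
    query_params = {k: v for k, v in request.items() if k not in path_fields}
    return {'uri': uri, 'method': 'get', 'query_params': query_params}


result = transcode('/v2/{parent}/buckets', {'parent': 'sample1/sample2', 'pageSize': 5})
assert result['uri'] == '/v2/sample1/sample2/buckets'
assert result['query_params'] == {'pageSize': 5}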
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_buckets(request) - - -def test_list_buckets_rest_flattened(): +def test_create_exclusion_flattened(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = logging_config.ListBucketsResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'sample1/sample2/locations/sample3'} - - # get truthy value for each flattened field - mock_args = dict( + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_exclusion), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.LogExclusion() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_exclusion( parent='parent_value', + exclusion=logging_config.LogExclusion(name='name_value'), ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = logging_config.ListBucketsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.list_buckets(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{parent=*/*/locations/*}/buckets" % client.transport._host, args[1]) + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].exclusion + mock_val = logging_config.LogExclusion(name='name_value') + assert arg == mock_val -def test_list_buckets_rest_flattened_error(transport: str = 'rest'): +def test_create_exclusion_flattened_error(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_buckets( - logging_config.ListBucketsRequest(), + client.create_exclusion( + logging_config.CreateExclusionRequest(), parent='parent_value', + exclusion=logging_config.LogExclusion(name='name_value'), ) - -def test_list_buckets_rest_pager(transport: str = 'rest'): - client = ConfigServiceV2Client( +@pytest.mark.asyncio +async def test_create_exclusion_flattened_async(): + client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
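The flattened-call tests in this file all hinge on one rule: a GAPIC method accepts either a complete request object or individual keyword fields, never both. A minimal sketch of the guard those ValueError tests exercise (illustrative, not the generated client code):

def create_exclusion(request=None, *, parent=None, exclusion=None):
    has_flattened_params = any(p is not None for p in (parent, exclusion))
    if request is not None and has_flattened_params:
        raise ValueError('If the `request` argument is set, then none of '
                         'the individual field arguments should be set.')
    # Flattened keywords are folded into a request if none was given.
    return request if request is not None else {'parent': parent, 'exclusion': exclusion}


# Usage: flattened fields populate the request; mixing both forms fails fast.
assert create_exclusion(parent='parent_value')['parent'] == 'parent_value'
try:
    create_exclusion({'parent': 'p'}, parent='parent_value')
except ValueError:
    pass  # expected, mirroring the tests above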
- #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - logging_config.ListBucketsResponse( - buckets=[ - logging_config.LogBucket(), - logging_config.LogBucket(), - logging_config.LogBucket(), - ], - next_page_token='abc', - ), - logging_config.ListBucketsResponse( - buckets=[], - next_page_token='def', - ), - logging_config.ListBucketsResponse( - buckets=[ - logging_config.LogBucket(), - ], - next_page_token='ghi', - ), - logging_config.ListBucketsResponse( - buckets=[ - logging_config.LogBucket(), - logging_config.LogBucket(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(logging_config.ListBucketsResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_exclusion), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.LogExclusion() - sample_request = {'parent': 'sample1/sample2/locations/sample3'} + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_exclusion( + parent='parent_value', + exclusion=logging_config.LogExclusion(name='name_value'), + ) - pager = client.list_buckets(request=sample_request) + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].exclusion + mock_val = logging_config.LogExclusion(name='name_value') + assert arg == mock_val - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, logging_config.LogBucket) - for i in results) +@pytest.mark.asyncio +async def test_create_exclusion_flattened_error_async(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) - pages = list(client.list_buckets(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.create_exclusion( + logging_config.CreateExclusionRequest(), + parent='parent_value', + exclusion=logging_config.LogExclusion(name='name_value'), + ) @pytest.mark.parametrize("request_type", [ - logging_config.GetBucketRequest, - dict, + logging_config.UpdateExclusionRequest, + dict, ]) -def test_get_bucket_rest(request_type): +def test_update_exclusion(request_type, transport: str = 'grpc'): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport=transport, ) - # send a request that will satisfy transcoding - request_init = {'name': 'sample1/sample2/locations/sample3/buckets/sample4'} - request = request_type(**request_init) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = logging_config.LogBucket( - name='name_value', - description='description_value', - retention_days=1512, - locked=True, - lifecycle_state=logging_config.LifecycleState.ACTIVE, + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_exclusion), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.LogExclusion( + name='name_value', + description='description_value', + filter='filter_value', + disabled=True, ) + response = client.update_exclusion(request) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = logging_config.LogBucket.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.get_bucket(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.UpdateExclusionRequest() # Establish that the response is the type that we expect. 
- assert isinstance(response, logging_config.LogBucket) + assert isinstance(response, logging_config.LogExclusion) assert response.name == 'name_value' assert response.description == 'description_value' - assert response.retention_days == 1512 - assert response.locked is True - assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE - - -def test_get_bucket_rest_required_fields(request_type=logging_config.GetBucketRequest): - transport_class = transports.ConfigServiceV2RestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_bucket._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_bucket._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + assert response.filter == 'filter_value' + assert response.disabled is True - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' +def test_update_exclusion_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport='grpc', ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = logging_config.LogBucket() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = logging_config.LogBucket.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.get_bucket(request) - - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_get_bucket_rest_unset_required_fields(): - transport = transports.ConfigServiceV2RestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.get_bucket._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_exclusion), + '__call__') as call: + client.update_exclusion() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.UpdateExclusionRequest() -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_bucket_rest_interceptors(null_interceptor): - transport = transports.ConfigServiceV2RestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ConfigServiceV2RestInterceptor(), - ) - client = ConfigServiceV2Client(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.ConfigServiceV2RestInterceptor, "post_get_bucket") as post, \ - mock.patch.object(transports.ConfigServiceV2RestInterceptor, "pre_get_bucket") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = logging_config.GetBucketRequest.pb(logging_config.GetBucketRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = logging_config.LogBucket.to_json(logging_config.LogBucket()) - - request = logging_config.GetBucketRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = logging_config.LogBucket() - - client.get_bucket(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_bucket_rest_bad_request(transport: str = 'rest', request_type=logging_config.GetBucketRequest): - client = ConfigServiceV2Client( +@pytest.mark.asyncio +async def test_update_exclusion_async(transport: str = 'grpc_asyncio', request_type=logging_config.UpdateExclusionRequest): + client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # send a request that will satisfy transcoding - request_init = {'name': 'sample1/sample2/locations/sample3/buckets/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
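The grpc_asyncio variants above need call.return_value to be awaitable, which is what grpc_helpers_async.FakeUnaryUnaryCall supplies. The same effect can be had with unittest.mock.AsyncMock alone, as this self-contained sketch shows (an illustrative stand-in, not the generated tests' helper):

import asyncio
from unittest import mock


async def main():
    # AsyncMock returns a coroutine, so the client-side `await` works.
    stub = mock.AsyncMock(return_value={'name': 'name_value'})
    response = await stub({})  # stands in for `await client.method(request)`
    assert response == {'name': 'name_value'}
    stub.assert_awaited_once()

asyncio.run(main())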
-    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 400
-        response_value.request = Request()
-        req.return_value = response_value
-        client.get_bucket(request)
-
-
-def test_get_bucket_rest_error():
-    client = ConfigServiceV2Client(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='rest'
-    )
-
-
-@pytest.mark.parametrize("request_type", [
-    logging_config.CreateBucketRequest,
-    dict,
-])
-def test_create_bucket_rest(request_type):
-    client = ConfigServiceV2Client(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {'parent': 'sample1/sample2/locations/sample3'}
-    request_init["bucket"] = {'name': 'name_value', 'description': 'description_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'retention_days': 1512, 'locked': True, 'lifecycle_state': 1}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = logging_config.LogBucket(
-            name='name_value',
-            description='description_value',
-            retention_days=1512,
-            locked=True,
-            lifecycle_state=logging_config.LifecycleState.ACTIVE,
-        )
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()

-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        pb_return_value = logging_config.LogBucket.pb(return_value)
-        json_return_value = json_format.MessageToJson(pb_return_value)
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_exclusion),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion(
+            name='name_value',
+            description='description_value',
+            filter='filter_value',
+            disabled=True,
+        ))
+        response = await client.update_exclusion(request)

-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        response = client.create_bucket(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == logging_config.UpdateExclusionRequest()

     # Establish that the response is the type that we expect.
- assert isinstance(response, logging_config.LogBucket) + assert isinstance(response, logging_config.LogExclusion) assert response.name == 'name_value' assert response.description == 'description_value' - assert response.retention_days == 1512 - assert response.locked is True - assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE - + assert response.filter == 'filter_value' + assert response.disabled is True -def test_create_bucket_rest_required_fields(request_type=logging_config.CreateBucketRequest): - transport_class = transports.ConfigServiceV2RestTransport - - request_init = {} - request_init["parent"] = "" - request_init["bucket_id"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - assert "bucketId" not in jsonified_request - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_bucket._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - assert "bucketId" in jsonified_request - assert jsonified_request["bucketId"] == request_init["bucket_id"] - - jsonified_request["parent"] = 'parent_value' - jsonified_request["bucketId"] = 'bucket_id_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_bucket._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("bucket_id", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - assert "bucketId" in jsonified_request - assert jsonified_request["bucketId"] == 'bucket_id_value' - - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = logging_config.LogBucket() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - response_value = Response() - response_value.status_code = 200 +@pytest.mark.asyncio +async def test_update_exclusion_async_from_dict(): + await test_update_exclusion_async(request_type=dict) - pb_return_value = logging_config.LogBucket.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value +def test_update_exclusion_field_headers(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) - response = client.create_bucket(request) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.UpdateExclusionRequest() - expected_params = [ - ( - "bucketId", - "", - ), - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params + request.name = 'name_value' + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_exclusion), + '__call__') as call: + call.return_value = logging_config.LogExclusion() + client.update_exclusion(request) -def test_create_bucket_rest_unset_required_fields(): - transport = transports.ConfigServiceV2RestTransport(credentials=ga_credentials.AnonymousCredentials) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request - unset_fields = transport.create_bucket._get_unset_required_fields({}) - assert set(unset_fields) == (set(("bucketId", )) & set(("parent", "bucketId", "bucket", ))) + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_bucket_rest_interceptors(null_interceptor): - transport = transports.ConfigServiceV2RestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ConfigServiceV2RestInterceptor(), - ) - client = ConfigServiceV2Client(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.ConfigServiceV2RestInterceptor, "post_create_bucket") as post, \ - mock.patch.object(transports.ConfigServiceV2RestInterceptor, "pre_create_bucket") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = logging_config.CreateBucketRequest.pb(logging_config.CreateBucketRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = logging_config.LogBucket.to_json(logging_config.LogBucket()) - - request = logging_config.CreateBucketRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = logging_config.LogBucket() - - client.create_bucket(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_create_bucket_rest_bad_request(transport: str = 'rest', request_type=logging_config.CreateBucketRequest): - client = ConfigServiceV2Client( +@pytest.mark.asyncio +async def test_update_exclusion_field_headers_async(): + client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) - # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2/locations/sample3'} - request_init["bucket"] = {'name': 'name_value', 'description': 'description_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'retention_days': 1512, 'locked': True, 'lifecycle_state': 1} - request = request_type(**request_init) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.UpdateExclusionRequest() - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.create_bucket(request) + request.name = 'name_value' + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_exclusion), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion()) + await client.update_exclusion(request) -def test_create_bucket_rest_error(): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] -@pytest.mark.parametrize("request_type", [ - logging_config.UpdateBucketRequest, - dict, -]) -def test_update_bucket_rest(request_type): +def test_update_exclusion_flattened(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - # send a request that will satisfy transcoding - request_init = {'name': 'sample1/sample2/locations/sample3/buckets/sample4'} - request_init["bucket"] = {'name': 'name_value', 'description': 'description_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'retention_days': 1512, 'locked': True, 'lifecycle_state': 1} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = logging_config.LogBucket( - name='name_value', - description='description_value', - retention_days=1512, - locked=True, - lifecycle_state=logging_config.LifecycleState.ACTIVE, + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_exclusion), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.LogExclusion() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_exclusion( + name='name_value', + exclusion=logging_config.LogExclusion(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), ) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = logging_config.LogBucket.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.update_bucket(request) - - # Establish that the response is the type that we expect. 
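On the update_mask flowing through the update_exclusion tests above: a FieldMask lists exactly the fields the server may modify, which is what makes partial updates safe. Constructing one with the real protobuf type (the paths below are illustrative, not taken from the tests):

from google.protobuf import field_mask_pb2

# Only 'description' and 'filter' would be rewritten; other fields stay untouched.
update_mask = field_mask_pb2.FieldMask(paths=['description', 'filter'])
assert list(update_mask.paths) == ['description', 'filter']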
- assert isinstance(response, logging_config.LogBucket) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.retention_days == 1512 - assert response.locked is True - assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE - - -def test_update_bucket_rest_required_fields(request_type=logging_config.UpdateBucketRequest): - transport_class = transports.ConfigServiceV2RestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_bucket._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_bucket._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("update_mask", )) - jsonified_request.update(unset_fields) + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + arg = args[0].exclusion + mock_val = logging_config.LogExclusion(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' +def test_update_exclusion_flattened_error(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = logging_config.LogBucket() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "patch", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - pb_return_value = logging_config.LogBucket.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_exclusion( + logging_config.UpdateExclusionRequest(), + name='name_value', + exclusion=logging_config.LogExclusion(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value +@pytest.mark.asyncio +async def test_update_exclusion_flattened_async(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) - response = client.update_bucket(request) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_exclusion), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.LogExclusion() - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_exclusion( + name='name_value', + exclusion=logging_config.LogExclusion(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + arg = args[0].exclusion + mock_val = logging_config.LogExclusion(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val -def test_update_bucket_rest_unset_required_fields(): - transport = transports.ConfigServiceV2RestTransport(credentials=ga_credentials.AnonymousCredentials) +@pytest.mark.asyncio +async def test_update_exclusion_flattened_error_async(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) - unset_fields = transport.update_bucket._get_unset_required_fields({}) - assert set(unset_fields) == (set(("updateMask", )) & set(("name", "bucket", "updateMask", ))) + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.update_exclusion( + logging_config.UpdateExclusionRequest(), + name='name_value', + exclusion=logging_config.LogExclusion(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_bucket_rest_interceptors(null_interceptor): - transport = transports.ConfigServiceV2RestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ConfigServiceV2RestInterceptor(), - ) - client = ConfigServiceV2Client(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.ConfigServiceV2RestInterceptor, "post_update_bucket") as post, \ - mock.patch.object(transports.ConfigServiceV2RestInterceptor, "pre_update_bucket") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = logging_config.UpdateBucketRequest.pb(logging_config.UpdateBucketRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = logging_config.LogBucket.to_json(logging_config.LogBucket()) - - request = logging_config.UpdateBucketRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = logging_config.LogBucket() - - client.update_bucket(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_update_bucket_rest_bad_request(transport: str = 'rest', request_type=logging_config.UpdateBucketRequest): +@pytest.mark.parametrize("request_type", [ + logging_config.DeleteExclusionRequest, + dict, +]) +def test_delete_exclusion(request_type, transport: str = 'grpc'): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # send a request that will satisfy transcoding - request_init = {'name': 'sample1/sample2/locations/sample3/buckets/sample4'} - request_init["bucket"] = {'name': 'name_value', 'description': 'description_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'retention_days': 1512, 'locked': True, 'lifecycle_state': 1} - request = request_type(**request_init) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_exclusion), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_exclusion(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.DeleteExclusionRequest() - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.update_bucket(request) + # Establish that the response is the type that we expect. + assert response is None -def test_update_bucket_rest_error(): +def test_delete_exclusion_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport='rest' + transport='grpc', ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_exclusion), + '__call__') as call: + client.delete_exclusion() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.DeleteExclusionRequest() -@pytest.mark.parametrize("request_type", [ - logging_config.DeleteBucketRequest, - dict, -]) -def test_delete_bucket_rest(request_type): - client = ConfigServiceV2Client( +@pytest.mark.asyncio +async def test_delete_exclusion_async(transport: str = 'grpc_asyncio', request_type=logging_config.DeleteExclusionRequest): + client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport=transport, ) - # send a request that will satisfy transcoding - request_init = {'name': 'sample1/sample2/locations/sample3/buckets/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_exclusion), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_exclusion(request) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.delete_bucket(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.DeleteExclusionRequest() # Establish that the response is the type that we expect. 
assert response is None -def test_delete_bucket_rest_required_fields(request_type=logging_config.DeleteBucketRequest): - transport_class = transports.ConfigServiceV2RestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_bucket._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_bucket._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) +@pytest.mark.asyncio +async def test_delete_exclusion_async_from_dict(): + await test_delete_exclusion_async(request_type=dict) - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' +def test_delete_exclusion_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "delete", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.delete_bucket(request) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.DeleteExclusionRequest() - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params + request.name = 'name_value' + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_exclusion), + '__call__') as call: + call.return_value = None + client.delete_exclusion(request) -def test_delete_bucket_rest_unset_required_fields(): - transport = transports.ConfigServiceV2RestTransport(credentials=ga_credentials.AnonymousCredentials) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request - unset_fields = transport.delete_bucket._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_bucket_rest_interceptors(null_interceptor): - transport = transports.ConfigServiceV2RestTransport( +@pytest.mark.asyncio +async def test_delete_exclusion_field_headers_async(): + client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ConfigServiceV2RestInterceptor(), - ) - client = ConfigServiceV2Client(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.ConfigServiceV2RestInterceptor, "pre_delete_bucket") as pre: - pre.assert_not_called() - pb_message = logging_config.DeleteBucketRequest.pb(logging_config.DeleteBucketRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.DeleteExclusionRequest() - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() + request.name = 'name_value' - request = logging_config.DeleteBucketRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_exclusion), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_exclusion(request) - client.delete_bucket(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request - pre.assert_called_once() + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] -def test_delete_bucket_rest_bad_request(transport: str = 'rest', request_type=logging_config.DeleteBucketRequest): +def test_delete_exclusion_flattened(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) - # send a request that will satisfy transcoding - request_init = {'name': 'sample1/sample2/locations/sample3/buckets/sample4'} - request = request_type(**request_init) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_exclusion), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_exclusion( + name='name_value', + ) - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_bucket(request) + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val -def test_delete_bucket_rest_error(): +def test_delete_exclusion_flattened_error(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport='rest' ) + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_exclusion( + logging_config.DeleteExclusionRequest(), + name='name_value', + ) -@pytest.mark.parametrize("request_type", [ - logging_config.UndeleteBucketRequest, - dict, -]) -def test_undelete_bucket_rest(request_type): - client = ConfigServiceV2Client( +@pytest.mark.asyncio +async def test_delete_exclusion_flattened_async(): + client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - # send a request that will satisfy transcoding - request_init = {'name': 'sample1/sample2/locations/sample3/buckets/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.undelete_bucket(request) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_exclusion), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None - # Establish that the response is the type that we expect. - assert response is None + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_exclusion( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val +@pytest.mark.asyncio +async def test_delete_exclusion_flattened_error_async(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) -def test_undelete_bucket_rest_required_fields(request_type=logging_config.UndeleteBucketRequest): - transport_class = transports.ConfigServiceV2RestTransport + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.delete_exclusion( + logging_config.DeleteExclusionRequest(), + name='name_value', + ) - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - # verify fields with default values are dropped +@pytest.mark.parametrize("request_type", [ + logging_config.GetCmekSettingsRequest, + dict, +]) +def test_get_cmek_settings(request_type, transport: str = 'grpc'): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).undelete_bucket._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - # verify required fields with default values are now present + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_cmek_settings), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.CmekSettings( + name='name_value', + kms_key_name='kms_key_name_value', + kms_key_version_name='kms_key_version_name_value', + service_account_id='service_account_id_value', + ) + response = client.get_cmek_settings(request) - jsonified_request["name"] = 'name_value' + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.GetCmekSettingsRequest() - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).undelete_bucket._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + # Establish that the response is the type that we expect. + assert isinstance(response, logging_config.CmekSettings) + assert response.name == 'name_value' + assert response.kms_key_name == 'kms_key_name_value' + assert response.kms_key_version_name == 'kms_key_version_name_value' + assert response.service_account_id == 'service_account_id_value' - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' +def test_get_cmek_settings_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport='grpc', ) - request = request_type(**request_init) - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "post",
-                'query_params': pb_request,
-            }
-            transcode_result['body'] = pb_request
-            transcode.return_value = transcode_result
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_cmek_settings),
+            '__call__') as call:
+        client.get_cmek_settings()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == logging_config.GetCmekSettingsRequest()

-            response_value = Response()
-            response_value.status_code = 200
-            json_return_value = ''

+@pytest.mark.asyncio
+async def test_get_cmek_settings_async(transport: str = 'grpc_asyncio', request_type=logging_config.GetCmekSettingsRequest):
+    client = ConfigServiceV2AsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )

-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()

-            response = client.undelete_bucket(request)
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_cmek_settings),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.CmekSettings(
+            name='name_value',
+            kms_key_name='kms_key_name_value',
+            kms_key_version_name='kms_key_version_name_value',
+            service_account_id='service_account_id_value',
+        ))
+        response = await client.get_cmek_settings(request)

-            expected_params = [
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
+    # Establish that the underlying gRPC stub method was called.
+    assert len(call.mock_calls)
+    _, args, _ = call.mock_calls[0]
+    assert args[0] == logging_config.GetCmekSettingsRequest()
+
+    # Establish that the response is the type that we expect.
+ assert isinstance(response, logging_config.CmekSettings) + assert response.name == 'name_value' + assert response.kms_key_name == 'kms_key_name_value' + assert response.kms_key_version_name == 'kms_key_version_name_value' + assert response.service_account_id == 'service_account_id_value' -def test_undelete_bucket_rest_unset_required_fields(): - transport = transports.ConfigServiceV2RestTransport(credentials=ga_credentials.AnonymousCredentials) - unset_fields = transport.undelete_bucket._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) +@pytest.mark.asyncio +async def test_get_cmek_settings_async_from_dict(): + await test_get_cmek_settings_async(request_type=dict) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_undelete_bucket_rest_interceptors(null_interceptor): - transport = transports.ConfigServiceV2RestTransport( +def test_get_cmek_settings_field_headers(): + client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ConfigServiceV2RestInterceptor(), - ) - client = ConfigServiceV2Client(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.ConfigServiceV2RestInterceptor, "pre_undelete_bucket") as pre: - pre.assert_not_called() - pb_message = logging_config.UndeleteBucketRequest.pb(logging_config.UndeleteBucketRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.GetCmekSettingsRequest() - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() + request.name = 'name_value' - request = logging_config.UndeleteBucketRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_cmek_settings), + '__call__') as call: + call.return_value = logging_config.CmekSettings() + client.get_cmek_settings(request) - client.undelete_bucket(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request - pre.assert_called_once() + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] -def test_undelete_bucket_rest_bad_request(transport: str = 'rest', request_type=logging_config.UndeleteBucketRequest): - client = ConfigServiceV2Client( +@pytest.mark.asyncio +async def test_get_cmek_settings_field_headers_async(): + client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) - # send a request that will satisfy transcoding - request_init = {'name': 'sample1/sample2/locations/sample3/buckets/sample4'} - request = request_type(**request_init) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = logging_config.GetCmekSettingsRequest() + + request.name = 'name_value' - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.undelete_bucket(request) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_cmek_settings), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.CmekSettings()) + await client.get_cmek_settings(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request -def test_undelete_bucket_rest_error(): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] @pytest.mark.parametrize("request_type", [ - logging_config.ListViewsRequest, - dict, + logging_config.UpdateCmekSettingsRequest, + dict, ]) -def test_list_views_rest(request_type): +def test_update_cmek_settings(request_type, transport: str = 'grpc'): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport=transport, ) - # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2/locations/sample3/buckets/sample4'} - request = request_type(**request_init) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = logging_config.ListViewsResponse( - next_page_token='next_page_token_value', + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_cmek_settings), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.CmekSettings( + name='name_value', + kms_key_name='kms_key_name_value', + kms_key_version_name='kms_key_version_name_value', + service_account_id='service_account_id_value', ) + response = client.update_cmek_settings(request) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = logging_config.ListViewsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.list_views(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.UpdateCmekSettingsRequest() # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListViewsPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_views_rest_required_fields(request_type=logging_config.ListViewsRequest): - transport_class = transports.ConfigServiceV2RestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_views._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_views._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("page_size", "page_token", )) - jsonified_request.update(unset_fields) + assert isinstance(response, logging_config.CmekSettings) + assert response.name == 'name_value' + assert response.kms_key_name == 'kms_key_name_value' + assert response.kms_key_version_name == 'kms_key_version_name_value' + assert response.service_account_id == 'service_account_id_value' - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' +def test_update_cmek_settings_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport='grpc', ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = logging_config.ListViewsResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - response_value = Response() - response_value.status_code = 200 + # Mock the actual call within the gRPC stub, and fake the request. 
+    with mock.patch.object(
+            type(client.transport.update_cmek_settings),
+            '__call__') as call:
+        client.update_cmek_settings()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == logging_config.UpdateCmekSettingsRequest()

-            pb_return_value = logging_config.ListViewsResponse.pb(return_value)
-            json_return_value = json_format.MessageToJson(pb_return_value)

+@pytest.mark.asyncio
+async def test_update_cmek_settings_async(transport: str = 'grpc_asyncio', request_type=logging_config.UpdateCmekSettingsRequest):
+    client = ConfigServiceV2AsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )

-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()

-            response = client.list_views(request)
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_cmek_settings),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.CmekSettings(
+            name='name_value',
+            kms_key_name='kms_key_name_value',
+            kms_key_version_name='kms_key_version_name_value',
+            service_account_id='service_account_id_value',
+        ))
+        response = await client.update_cmek_settings(request)

-            expected_params = [
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
+    # Establish that the underlying gRPC stub method was called.
+    assert len(call.mock_calls)
+    _, args, _ = call.mock_calls[0]
+    assert args[0] == logging_config.UpdateCmekSettingsRequest()
+
+    # Establish that the response is the type that we expect.
+ assert isinstance(response, logging_config.CmekSettings) + assert response.name == 'name_value' + assert response.kms_key_name == 'kms_key_name_value' + assert response.kms_key_version_name == 'kms_key_version_name_value' + assert response.service_account_id == 'service_account_id_value' -def test_list_views_rest_unset_required_fields(): - transport = transports.ConfigServiceV2RestTransport(credentials=ga_credentials.AnonymousCredentials) - unset_fields = transport.list_views._get_unset_required_fields({}) - assert set(unset_fields) == (set(("pageSize", "pageToken", )) & set(("parent", ))) +@pytest.mark.asyncio +async def test_update_cmek_settings_async_from_dict(): + await test_update_cmek_settings_async(request_type=dict) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_views_rest_interceptors(null_interceptor): - transport = transports.ConfigServiceV2RestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ConfigServiceV2RestInterceptor(), - ) - client = ConfigServiceV2Client(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.ConfigServiceV2RestInterceptor, "post_list_views") as post, \ - mock.patch.object(transports.ConfigServiceV2RestInterceptor, "pre_list_views") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = logging_config.ListViewsRequest.pb(logging_config.ListViewsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = logging_config.ListViewsResponse.to_json(logging_config.ListViewsResponse()) - - request = logging_config.ListViewsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = logging_config.ListViewsResponse() - - client.list_views(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_list_views_rest_bad_request(transport: str = 'rest', request_type=logging_config.ListViewsRequest): +def test_update_cmek_settings_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) - # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2/locations/sample3/buckets/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_views(request) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.UpdateCmekSettingsRequest() + request.name = 'name_value' -def test_list_views_rest_flattened(): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_cmek_settings), + '__call__') as call: + call.return_value = logging_config.CmekSettings() + client.update_cmek_settings(request) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = logging_config.ListViewsResponse() + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'sample1/sample2/locations/sample3/buckets/sample4'} + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - ) - mock_args.update(sample_request) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = logging_config.ListViewsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value +@pytest.mark.asyncio +async def test_update_cmek_settings_field_headers_async(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) - client.list_views(**mock_args) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.UpdateCmekSettingsRequest() - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{parent=*/*/locations/*/buckets/*}/views" % client.transport._host, args[1]) + request.name = 'name_value' + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_cmek_settings), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.CmekSettings()) + await client.update_cmek_settings(request) -def test_list_views_rest_flattened_error(transport: str = 'rest'): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_views( - logging_config.ListViewsRequest(), - parent='parent_value', - ) + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] -def test_list_views_rest_pager(transport: str = 'rest'): +@pytest.mark.parametrize("request_type", [ + logging_config.GetSettingsRequest, + dict, +]) +def test_get_settings(request_type, transport: str = 'grpc'): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - logging_config.ListViewsResponse( - views=[ - logging_config.LogView(), - logging_config.LogView(), - logging_config.LogView(), - ], - next_page_token='abc', - ), - logging_config.ListViewsResponse( - views=[], - next_page_token='def', - ), - logging_config.ListViewsResponse( - views=[ - logging_config.LogView(), - ], - next_page_token='ghi', - ), - logging_config.ListViewsResponse( - views=[ - logging_config.LogView(), - logging_config.LogView(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(logging_config.ListViewsResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'parent': 'sample1/sample2/locations/sample3/buckets/sample4'} + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - pager = client.list_views(request=sample_request) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_settings), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.Settings( + name='name_value', + kms_key_name='kms_key_name_value', + kms_service_account_id='kms_service_account_id_value', + storage_location='storage_location_value', + disable_default_sink=True, + ) + response = client.get_settings(request) - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, logging_config.LogView) - for i in results) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.GetSettingsRequest() - pages = list(client.list_views(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token + # Establish that the response is the type that we expect. + assert isinstance(response, logging_config.Settings) + assert response.name == 'name_value' + assert response.kms_key_name == 'kms_key_name_value' + assert response.kms_service_account_id == 'kms_service_account_id_value' + assert response.storage_location == 'storage_location_value' + assert response.disable_default_sink is True -@pytest.mark.parametrize("request_type", [ - logging_config.GetViewRequest, - dict, -]) -def test_get_view_rest(request_type): +def test_get_settings_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport='grpc', ) - # send a request that will satisfy transcoding - request_init = {'name': 'sample1/sample2/locations/sample3/buckets/sample4/views/sample5'} - request = request_type(**request_init) + # Mock the actual call within the gRPC stub, and fake the request. 
+    with mock.patch.object(
+            type(client.transport.get_settings),
+            '__call__') as call:
+        client.get_settings()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == logging_config.GetSettingsRequest()

-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = logging_config.LogView(
-            name='name_value',
-            description='description_value',
-            filter='filter_value',
-        )

+@pytest.mark.asyncio
+async def test_get_settings_async(transport: str = 'grpc_asyncio', request_type=logging_config.GetSettingsRequest):
+    client = ConfigServiceV2AsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )

-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        pb_return_value = logging_config.LogView.pb(return_value)
-        json_return_value = json_format.MessageToJson(pb_return_value)
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()

-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        response = client.get_view(request)
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_settings),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.Settings(
+            name='name_value',
+            kms_key_name='kms_key_name_value',
+            kms_service_account_id='kms_service_account_id_value',
+            storage_location='storage_location_value',
+            disable_default_sink=True,
+        ))
+        response = await client.get_settings(request)
+
+    # Establish that the underlying gRPC stub method was called.
+    assert len(call.mock_calls)
+    _, args, _ = call.mock_calls[0]
+    assert args[0] == logging_config.GetSettingsRequest()

     # Establish that the response is the type that we expect.
-    assert isinstance(response, logging_config.LogView)
+    assert isinstance(response, logging_config.Settings)
     assert response.name == 'name_value'
-    assert response.description == 'description_value'
-    assert response.filter == 'filter_value'
+    assert response.kms_key_name == 'kms_key_name_value'
+    assert response.kms_service_account_id == 'kms_service_account_id_value'
+    assert response.storage_location == 'storage_location_value'
+    assert response.disable_default_sink is True
+
+
+@pytest.mark.asyncio
+async def test_get_settings_async_from_dict():
+    await test_get_settings_async(request_type=dict)


-def test_get_view_rest_required_fields(request_type=logging_config.GetViewRequest):
-    transport_class = transports.ConfigServiceV2RestTransport

-    request_init = {}
-    request_init["name"] = ""
-    request = request_type(**request_init)
-    pb_request = request_type.pb(request)
-    jsonified_request = json.loads(json_format.MessageToJson(
-        pb_request,
-        including_default_value_fields=False,
-        use_integers_for_enums=False
-    ))
+def test_get_settings_field_headers():
+    client = ConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )

-    # verify fields with default values are dropped
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+ request = logging_config.GetSettingsRequest() - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_view._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + request.name = 'name_value' - # verify required fields with default values are now present + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_settings), + '__call__') as call: + call.return_value = logging_config.Settings() + client.get_settings(request) - jsonified_request["name"] = 'name_value' + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_view._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - client = ConfigServiceV2Client( +@pytest.mark.asyncio +async def test_get_settings_field_headers_async(): + client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', ) - request = request_type(**request_init) - # Designate an appropriate value for the returned response. - return_value = logging_config.LogView() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.GetSettingsRequest() - response_value = Response() - response_value.status_code = 200 + request.name = 'name_value' - pb_return_value = logging_config.LogView.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_settings), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.Settings()) + await client.get_settings(request) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request - response = client.get_view(request) + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params +def test_get_settings_flattened(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) -def test_get_view_rest_unset_required_fields(): - transport = transports.ConfigServiceV2RestTransport(credentials=ga_credentials.AnonymousCredentials) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_settings), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.Settings() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_settings( + name='name_value', + ) - unset_fields = transport.get_view._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_view_rest_interceptors(null_interceptor): - transport = transports.ConfigServiceV2RestTransport( +def test_get_settings_flattened_error(): + client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ConfigServiceV2RestInterceptor(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_settings( + logging_config.GetSettingsRequest(), + name='name_value', ) - client = ConfigServiceV2Client(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.ConfigServiceV2RestInterceptor, "post_get_view") as post, \ - mock.patch.object(transports.ConfigServiceV2RestInterceptor, "pre_get_view") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = logging_config.GetViewRequest.pb(logging_config.GetViewRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = logging_config.LogView.to_json(logging_config.LogView()) - - request = logging_config.GetViewRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = logging_config.LogView() - - client.get_view(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_view_rest_bad_request(transport: str = 'rest', request_type=logging_config.GetViewRequest): - client = ConfigServiceV2Client( + +@pytest.mark.asyncio +async def test_get_settings_flattened_async(): + client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) - # send a request that will satisfy transcoding - request_init = {'name': 'sample1/sample2/locations/sample3/buckets/sample4/views/sample5'} - request = request_type(**request_init) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_settings), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.Settings() - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_view(request) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.Settings()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_settings( + name='name_value', + ) + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val -def test_get_view_rest_error(): - client = ConfigServiceV2Client( +@pytest.mark.asyncio +async def test_get_settings_flattened_error_async(): + client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest' ) + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_settings( + logging_config.GetSettingsRequest(), + name='name_value', + ) + @pytest.mark.parametrize("request_type", [ - logging_config.CreateViewRequest, - dict, + logging_config.UpdateSettingsRequest, + dict, ]) -def test_create_view_rest(request_type): +def test_update_settings(request_type, transport: str = 'grpc'): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport=transport, ) - # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2/locations/sample3/buckets/sample4'} - request_init["view"] = {'name': 'name_value', 'description': 'description_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'filter': 'filter_value'} - request = request_type(**request_init) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = logging_config.LogView( - name='name_value', - description='description_value', - filter='filter_value', + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_settings), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.Settings( + name='name_value', + kms_key_name='kms_key_name_value', + kms_service_account_id='kms_service_account_id_value', + storage_location='storage_location_value', + disable_default_sink=True, ) + response = client.update_settings(request) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = logging_config.LogView.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.create_view(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.UpdateSettingsRequest() # Establish that the response is the type that we expect. 
- assert isinstance(response, logging_config.LogView) + assert isinstance(response, logging_config.Settings) assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.filter == 'filter_value' - - -def test_create_view_rest_required_fields(request_type=logging_config.CreateViewRequest): - transport_class = transports.ConfigServiceV2RestTransport - - request_init = {} - request_init["parent"] = "" - request_init["view_id"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - assert "viewId" not in jsonified_request - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_view._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - assert "viewId" in jsonified_request - assert jsonified_request["viewId"] == request_init["view_id"] - - jsonified_request["parent"] = 'parent_value' - jsonified_request["viewId"] = 'view_id_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_view._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("view_id", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - assert "viewId" in jsonified_request - assert jsonified_request["viewId"] == 'view_id_value' - - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = logging_config.LogView() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = logging_config.LogView.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.create_view(request) - - expected_params = [ - ( - "viewId", - "", - ), - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_create_view_rest_unset_required_fields(): - transport = transports.ConfigServiceV2RestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.create_view._get_unset_required_fields({}) - assert set(unset_fields) == (set(("viewId", )) & set(("parent", "viewId", "view", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_view_rest_interceptors(null_interceptor): - transport = transports.ConfigServiceV2RestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ConfigServiceV2RestInterceptor(), - ) - client = ConfigServiceV2Client(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.ConfigServiceV2RestInterceptor, "post_create_view") as post, \ - mock.patch.object(transports.ConfigServiceV2RestInterceptor, "pre_create_view") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = logging_config.CreateViewRequest.pb(logging_config.CreateViewRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = logging_config.LogView.to_json(logging_config.LogView()) - - request = logging_config.CreateViewRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = logging_config.LogView() - - client.create_view(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_create_view_rest_bad_request(transport: str = 'rest', request_type=logging_config.CreateViewRequest): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2/locations/sample3/buckets/sample4'} - request_init["view"] = {'name': 'name_value', 'description': 'description_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'filter': 'filter_value'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.create_view(request) + assert response.kms_key_name == 'kms_key_name_value' + assert response.kms_service_account_id == 'kms_service_account_id_value' + assert response.storage_location == 'storage_location_value' + assert response.disable_default_sink is True -def test_create_view_rest_error(): +def test_update_settings_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport='rest' + transport='grpc', ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_settings), + '__call__') as call: + client.update_settings() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.UpdateSettingsRequest() -@pytest.mark.parametrize("request_type", [ - logging_config.UpdateViewRequest, - dict, -]) -def test_update_view_rest(request_type): - client = ConfigServiceV2Client( +@pytest.mark.asyncio +async def test_update_settings_async(transport: str = 'grpc_asyncio', request_type=logging_config.UpdateSettingsRequest): + client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport=transport, ) - # send a request that will satisfy transcoding - request_init = {'name': 'sample1/sample2/locations/sample3/buckets/sample4/views/sample5'} - request_init["view"] = {'name': 'name_value', 'description': 'description_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'filter': 'filter_value'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = logging_config.LogView( - name='name_value', - description='description_value', - filter='filter_value', - ) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = logging_config.LogView.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_settings), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.Settings( + name='name_value', + kms_key_name='kms_key_name_value', + kms_service_account_id='kms_service_account_id_value', + storage_location='storage_location_value', + disable_default_sink=True, + )) + response = await client.update_settings(request) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.update_view(request) + # Establish that the underlying gRPC stub method was called. 
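A note on the grpc_helpers_async.FakeUnaryUnaryCall wrapper used in the async tests: the async client awaits the mocked stub call, so returning a bare proto from the mock would fail with "object is not awaitable". A toy stand-in for the helper, illustrative only:

    import asyncio

    class FakeUnaryCall:
        # Mimics an already-completed unary-unary gRPC call.
        def __init__(self, response):
            self._response = response

        def __await__(self):
            yield from asyncio.sleep(0).__await__()  # complete on the next tick
            return self._response

    async def demo():
        call = FakeUnaryCall("canned-settings")
        assert await call == "canned-settings"

    asyncio.run(demo())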
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.UpdateSettingsRequest() # Establish that the response is the type that we expect. - assert isinstance(response, logging_config.LogView) + assert isinstance(response, logging_config.Settings) assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.filter == 'filter_value' - - -def test_update_view_rest_required_fields(request_type=logging_config.UpdateViewRequest): - transport_class = transports.ConfigServiceV2RestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_view._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present + assert response.kms_key_name == 'kms_key_name_value' + assert response.kms_service_account_id == 'kms_service_account_id_value' + assert response.storage_location == 'storage_location_value' + assert response.disable_default_sink is True - jsonified_request["name"] = 'name_value' - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_view._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("update_mask", )) - jsonified_request.update(unset_fields) +@pytest.mark.asyncio +async def test_update_settings_async_from_dict(): + await test_update_settings_async(request_type=dict) - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' +def test_update_settings_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = logging_config.LogView() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "patch", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = logging_config.LogView.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.update_view(request) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
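The comment above describes gRPC request routing: any request field that would appear in the HTTP URI is mirrored into the x-goog-request-params metadata entry so the backend can route the call without parsing the body. A self-contained sketch of how such a header is assembled (the real clients use a routing-header helper from google.api_core; the function name here is illustrative):

    from urllib.parse import quote

    def routing_metadata(**fields):
        # Build 'key=value' pairs, percent-encoding values, joined with '&'.
        params = "&".join(
            f"{key}={quote(str(value), safe='')}"
            for key, value in sorted(fields.items())
        )
        return [("x-goog-request-params", params)]

    assert routing_metadata(name="name_value") == [
        ("x-goog-request-params", "name=name_value")
    ]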
+ request = logging_config.UpdateSettingsRequest() - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params + request.name = 'name_value' + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_settings), + '__call__') as call: + call.return_value = logging_config.Settings() + client.update_settings(request) -def test_update_view_rest_unset_required_fields(): - transport = transports.ConfigServiceV2RestTransport(credentials=ga_credentials.AnonymousCredentials) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request - unset_fields = transport.update_view._get_unset_required_fields({}) - assert set(unset_fields) == (set(("updateMask", )) & set(("name", "view", ))) + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_view_rest_interceptors(null_interceptor): - transport = transports.ConfigServiceV2RestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ConfigServiceV2RestInterceptor(), - ) - client = ConfigServiceV2Client(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.ConfigServiceV2RestInterceptor, "post_update_view") as post, \ - mock.patch.object(transports.ConfigServiceV2RestInterceptor, "pre_update_view") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = logging_config.UpdateViewRequest.pb(logging_config.UpdateViewRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = logging_config.LogView.to_json(logging_config.LogView()) - - request = logging_config.UpdateViewRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = logging_config.LogView() - - client.update_view(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_update_view_rest_bad_request(transport: str = 'rest', request_type=logging_config.UpdateViewRequest): - client = ConfigServiceV2Client( +@pytest.mark.asyncio +async def test_update_settings_field_headers_async(): + client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) - # send a request that will satisfy transcoding - request_init = {'name': 'sample1/sample2/locations/sample3/buckets/sample4/views/sample5'} - request_init["view"] = {'name': 'name_value', 'description': 'description_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'filter': 'filter_value'} - request = request_type(**request_init) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.UpdateSettingsRequest() - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.update_view(request) + request.name = 'name_value' + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_settings), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.Settings()) + await client.update_settings(request) -def test_update_view_rest_error(): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] -@pytest.mark.parametrize("request_type", [ - logging_config.DeleteViewRequest, - dict, -]) -def test_delete_view_rest(request_type): +def test_update_settings_flattened(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - # send a request that will satisfy transcoding - request_init = {'name': 'sample1/sample2/locations/sample3/buckets/sample4/views/sample5'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.delete_view(request) - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_view_rest_required_fields(request_type=logging_config.DeleteViewRequest): - transport_class = transports.ConfigServiceV2RestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_view._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_settings), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.Settings() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
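One of the flattened fields passed just below is an update_mask: a google.protobuf FieldMask naming exactly which Settings fields the server should overwrite, leaving all others untouched ('paths_value' in the test is a placeholder, not a real field path). A small runnable illustration:

    from google.protobuf import field_mask_pb2

    # Only the named fields would be updated; everything else keeps
    # its stored value on the server.
    mask = field_mask_pb2.FieldMask(paths=["kms_key_name", "disable_default_sink"])
    for path in mask.paths:
        print("server may update:", path)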
+ client.update_settings( + settings=logging_config.Settings(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_view._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].settings + mock_val = logging_config.Settings(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' +def test_update_settings_flattened_error(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "delete", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.delete_view(request) - - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_delete_view_rest_unset_required_fields(): - transport = transports.ConfigServiceV2RestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.delete_view._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_view_rest_interceptors(null_interceptor): - transport = transports.ConfigServiceV2RestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ConfigServiceV2RestInterceptor(), + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_settings( + logging_config.UpdateSettingsRequest(), + settings=logging_config.Settings(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), ) - client = ConfigServiceV2Client(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.ConfigServiceV2RestInterceptor, "pre_delete_view") as pre: - pre.assert_not_called() - pb_message = logging_config.DeleteViewRequest.pb(logging_config.DeleteViewRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - - request = logging_config.DeleteViewRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - - client.delete_view(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - pre.assert_called_once() - - -def test_delete_view_rest_bad_request(transport: str = 'rest', request_type=logging_config.DeleteViewRequest): - client = ConfigServiceV2Client( +@pytest.mark.asyncio +async def test_update_settings_flattened_async(): + client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) - # send a request that will satisfy transcoding - request_init = {'name': 'sample1/sample2/locations/sample3/buckets/sample4/views/sample5'} - request = request_type(**request_init) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_settings), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.Settings() - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_view(request) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.Settings()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_settings( + settings=logging_config.Settings(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].settings + mock_val = logging_config.Settings(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val -def test_delete_view_rest_error(): - client = ConfigServiceV2Client( +@pytest.mark.asyncio +async def test_update_settings_flattened_error_async(): + client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest' ) + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.update_settings( + logging_config.UpdateSettingsRequest(), + settings=logging_config.Settings(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + @pytest.mark.parametrize("request_type", [ - logging_config.ListSinksRequest, - dict, + logging_config.CopyLogEntriesRequest, + dict, ]) -def test_list_sinks_rest(request_type): +def test_copy_log_entries(request_type, transport: str = 'grpc'): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport=transport, ) - # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = logging_config.ListSinksResponse( - next_page_token='next_page_token_value', - ) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = logging_config.ListSinksResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.copy_log_entries), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.copy_log_entries(request) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.list_sinks(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.CopyLogEntriesRequest() # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListSinksPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_sinks_rest_required_fields(request_type=logging_config.ListSinksRequest): - transport_class = transports.ConfigServiceV2RestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_sinks._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' + assert isinstance(response, future.Future) - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_sinks._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("page_size", "page_token", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' +def test_copy_log_entries_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport='grpc', ) - request = request_type(**request_init) - # Designate an appropriate value for the returned response. - return_value = logging_config.ListSinksResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.copy_log_entries), + '__call__') as call: + client.copy_log_entries() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.CopyLogEntriesRequest() - pb_return_value = logging_config.ListSinksResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) +@pytest.mark.asyncio +async def test_copy_log_entries_async(transport: str = 'grpc_asyncio', request_type=logging_config.CopyLogEntriesRequest): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - response = client.list_sinks(request) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.copy_log_entries), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.copy_log_entries(request) - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.CopyLogEntriesRequest() + # Establish that the response is the type that we expect. 
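The assertion that follows checks for future.Future because copy_log_entries is a long-running operation: the client wraps the raw operations_pb2.Operation in a google.api_core future rather than returning a finished response. Against a real service it would be used roughly like this (a sketch only; the resource names are placeholders and the call needs real credentials):

    from google.cloud.logging_v2.types import logging_config

    def copy_entries_sketch(client):
        operation = client.copy_log_entries(
            logging_config.CopyLogEntriesRequest(
                name="projects/my-project/locations/global/buckets/my-bucket",
                destination="storage.googleapis.com/my-gcs-bucket",
            )
        )
        # result() blocks, polling the operation until it completes
        # (or raises if the server reports an error).
        return operation.result(timeout=300)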
+ assert isinstance(response, future.Future) -def test_list_sinks_rest_unset_required_fields(): - transport = transports.ConfigServiceV2RestTransport(credentials=ga_credentials.AnonymousCredentials) - unset_fields = transport.list_sinks._get_unset_required_fields({}) - assert set(unset_fields) == (set(("pageSize", "pageToken", )) & set(("parent", ))) +@pytest.mark.asyncio +async def test_copy_log_entries_async_from_dict(): + await test_copy_log_entries_async(request_type=dict) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_sinks_rest_interceptors(null_interceptor): - transport = transports.ConfigServiceV2RestTransport( +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.ConfigServiceV2GrpcTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ConfigServiceV2RestInterceptor(), - ) - client = ConfigServiceV2Client(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.ConfigServiceV2RestInterceptor, "post_list_sinks") as post, \ - mock.patch.object(transports.ConfigServiceV2RestInterceptor, "pre_list_sinks") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = logging_config.ListSinksRequest.pb(logging_config.ListSinksRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = logging_config.ListSinksResponse.to_json(logging_config.ListSinksResponse()) - - request = logging_config.ListSinksRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = logging_config.ListSinksResponse() - - client.list_sinks(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_list_sinks_rest_bad_request(transport: str = 'rest', request_type=logging_config.ListSinksRequest): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) + with pytest.raises(ValueError): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_sinks(request) - - -def test_list_sinks_rest_flattened(): - client = ConfigServiceV2Client( + # It is an error to provide a credentials file and a transport instance. + transport = transports.ConfigServiceV2GrpcTransport( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = logging_config.ListSinksResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'sample1/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', + with pytest.raises(ValueError): + client = ConfigServiceV2Client( + client_options={"credentials_file": "credentials.json"}, + transport=transport, ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = logging_config.ListSinksResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.list_sinks(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{parent=*/*}/sinks" % client.transport._host, args[1]) - -def test_list_sinks_rest_flattened_error(transport: str = 'rest'): - client = ConfigServiceV2Client( + # It is an error to provide an api_key and a transport instance. + transport = transports.ConfigServiceV2GrpcTransport( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. + options = client_options.ClientOptions() + options.api_key = "api_key" with pytest.raises(ValueError): - client.list_sinks( - logging_config.ListSinksRequest(), - parent='parent_value', + client = ConfigServiceV2Client( + client_options=options, + transport=transport, ) + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ConfigServiceV2Client( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) -def test_list_sinks_rest_pager(transport: str = 'rest'): - client = ConfigServiceV2Client( + # It is an error to provide scopes and a transport instance. + transport = transports.ConfigServiceV2GrpcTransport( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - logging_config.ListSinksResponse( - sinks=[ - logging_config.LogSink(), - logging_config.LogSink(), - logging_config.LogSink(), - ], - next_page_token='abc', - ), - logging_config.ListSinksResponse( - sinks=[], - next_page_token='def', - ), - logging_config.ListSinksResponse( - sinks=[ - logging_config.LogSink(), - ], - next_page_token='ghi', - ), - logging_config.ListSinksResponse( - sinks=[ - logging_config.LogSink(), - logging_config.LogSink(), - ], - ), + with pytest.raises(ValueError): + client = ConfigServiceV2Client( + client_options={"scopes": ["1", "2"]}, + transport=transport, ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(logging_config.ListSinksResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'parent': 'sample1/sample2'} - - pager = client.list_sinks(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, logging_config.LogSink) - for i in results) - - pages = list(client.list_sinks(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize("request_type", [ - logging_config.GetSinkRequest, - dict, -]) -def test_get_sink_rest(request_type): - client = ConfigServiceV2Client( +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.ConfigServiceV2GrpcTransport( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) + client = ConfigServiceV2Client(transport=transport) + assert client.transport is transport - # send a request that will satisfy transcoding - request_init = {'sink_name': 'sample1/sample2/sinks/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = logging_config.LogSink( - name='name_value', - destination='destination_value', - filter='filter_value', - description='description_value', - disabled=True, - output_version_format=logging_config.LogSink.VersionFormat.V2, - writer_identity='writer_identity_value', - include_children=True, - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = logging_config.LogSink.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.get_sink(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, logging_config.LogSink) - assert response.name == 'name_value' - assert response.destination == 'destination_value' - assert response.filter == 'filter_value' - assert response.description == 'description_value' - assert response.disabled is True - assert response.output_version_format == logging_config.LogSink.VersionFormat.V2 - assert response.writer_identity == 'writer_identity_value' - assert response.include_children is True - - -def test_get_sink_rest_required_fields(request_type=logging_config.GetSinkRequest): - transport_class = transports.ConfigServiceV2RestTransport - - request_init = {} - request_init["sink_name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_sink._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["sinkName"] = 'sink_name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_sink._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "sinkName" in jsonified_request - assert jsonified_request["sinkName"] == 'sink_name_value' - - client = ConfigServiceV2Client( +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.ConfigServiceV2GrpcTransport( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = logging_config.LogSink() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = logging_config.LogSink.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.get_sink(request) - - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_get_sink_rest_unset_required_fields(): - transport = transports.ConfigServiceV2RestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.get_sink._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("sinkName", ))) - + channel = transport.grpc_channel + assert channel -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_sink_rest_interceptors(null_interceptor): - transport = transports.ConfigServiceV2RestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ConfigServiceV2RestInterceptor(), - ) - client = ConfigServiceV2Client(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.ConfigServiceV2RestInterceptor, "post_get_sink") as post, \ - mock.patch.object(transports.ConfigServiceV2RestInterceptor, "pre_get_sink") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = logging_config.GetSinkRequest.pb(logging_config.GetSinkRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = logging_config.LogSink.to_json(logging_config.LogSink()) - - request = logging_config.GetSinkRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = logging_config.LogSink() - - client.get_sink(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_sink_rest_bad_request(transport: str = 'rest', request_type=logging_config.GetSinkRequest): - client = ConfigServiceV2Client( + transport = transports.ConfigServiceV2GrpcAsyncIOTransport( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) + channel = transport.grpc_channel + assert channel - # send a request that will satisfy transcoding - request_init = {'sink_name': 'sample1/sample2/sinks/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_sink(request) - +@pytest.mark.parametrize("transport_class", [ + transports.ConfigServiceV2GrpcTransport, + transports.ConfigServiceV2GrpcAsyncIOTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() -def test_get_sink_rest_flattened(): - client = ConfigServiceV2Client( +@pytest.mark.parametrize("transport_name", [ + "grpc", +]) +def test_transport_kind(transport_name): + transport = ConfigServiceV2Client.get_transport_class(transport_name)( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) + assert transport.kind == transport_name - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = logging_config.LogSink() - - # get arguments that satisfy an http rule for this method - sample_request = {'sink_name': 'sample1/sample2/sinks/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - sink_name='sink_name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = logging_config.LogSink.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.get_sink(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{sink_name=*/*/sinks/*}" % client.transport._host, args[1]) - - -def test_get_sink_rest_flattened_error(transport: str = 'rest'): +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + ) + assert isinstance( + client.transport, + transports.ConfigServiceV2GrpcTransport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_sink( - logging_config.GetSinkRequest(), - sink_name='sink_name_value', +def test_config_service_v2_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.ConfigServiceV2Transport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" ) -def test_get_sink_rest_error(): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - +def test_config_service_v2_base_transport(): + # Instantiate the base transport. 
+ with mock.patch('google.cloud.logging_v2.services.config_service_v2.transports.ConfigServiceV2Transport.__init__') as Transport: + Transport.return_value = None + transport = transports.ConfigServiceV2Transport( + credentials=ga_credentials.AnonymousCredentials(), + ) -@pytest.mark.parametrize("request_type", [ - logging_config.CreateSinkRequest, - dict, -]) -def test_create_sink_rest(request_type): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + 'list_buckets', + 'get_bucket', + 'create_bucket_async', + 'update_bucket_async', + 'create_bucket', + 'update_bucket', + 'delete_bucket', + 'undelete_bucket', + 'list_views', + 'get_view', + 'create_view', + 'update_view', + 'delete_view', + 'list_sinks', + 'get_sink', + 'create_sink', + 'update_sink', + 'delete_sink', + 'create_link', + 'delete_link', + 'list_links', + 'get_link', + 'list_exclusions', + 'get_exclusion', + 'create_exclusion', + 'update_exclusion', + 'delete_exclusion', + 'get_cmek_settings', + 'update_cmek_settings', + 'get_settings', + 'update_settings', + 'copy_log_entries', + 'get_operation', + 'cancel_operation', + 'list_operations', ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) - # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2'} - request_init["sink"] = {'name': 'name_value', 'destination': 'destination_value', 'filter': 'filter_value', 'description': 'description_value', 'disabled': True, 'exclusions': [{'name': 'name_value', 'description': 'description_value', 'filter': 'filter_value', 'disabled': True, 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}}], 'output_version_format': 1, 'writer_identity': 'writer_identity_value', 'include_children': True, 'bigquery_options': {'use_partitioned_tables': True, 'uses_timestamp_column_partitioning': True}, 'create_time': {}, 'update_time': {}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = logging_config.LogSink( - name='name_value', - destination='destination_value', - filter='filter_value', - description='description_value', - disabled=True, - output_version_format=logging_config.LogSink.VersionFormat.V2, - writer_identity='writer_identity_value', - include_children=True, - ) + with pytest.raises(NotImplementedError): + transport.close() - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = logging_config.LogSink.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.create_sink(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, logging_config.LogSink) - assert response.name == 'name_value' - assert response.destination == 'destination_value' - assert response.filter == 'filter_value' - assert response.description == 'description_value' - assert response.disabled is True - assert response.output_version_format == logging_config.LogSink.VersionFormat.V2 - assert response.writer_identity == 'writer_identity_value' - assert response.include_children is True - - -def test_create_sink_rest_required_fields(request_type=logging_config.CreateSinkRequest): - transport_class = transports.ConfigServiceV2RestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_sink._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_sink._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("unique_writer_identity", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - # Designate an appropriate value for the returned response. - return_value = logging_config.LogSink() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result +def test_config_service_v2_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.logging_v2.services.config_service_v2.transports.ConfigServiceV2Transport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ConfigServiceV2Transport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/cloud-platform.read-only', + 'https://www.googleapis.com/auth/logging.admin', + 'https://www.googleapis.com/auth/logging.read', +), + quota_project_id="octopus", + ) - response_value = Response() - response_value.status_code = 200 - pb_return_value = logging_config.LogSink.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) +def test_config_service_v2_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.logging_v2.services.config_service_v2.transports.ConfigServiceV2Transport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ConfigServiceV2Transport() + adc.assert_called_once() - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.create_sink(request) +def test_config_service_v2_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + ConfigServiceV2Client() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/cloud-platform.read-only', + 'https://www.googleapis.com/auth/logging.admin', + 'https://www.googleapis.com/auth/logging.read', +), + quota_project_id=None, + ) - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params +@pytest.mark.parametrize( + "transport_class", + [ + transports.ConfigServiceV2GrpcTransport, + transports.ConfigServiceV2GrpcAsyncIOTransport, + ], +) +def test_config_service_v2_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
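Background for these ADC tests: google.auth.default() resolves Application Default Credentials from the environment (the GOOGLE_APPLICATION_CREDENTIALS file, gcloud user credentials, or the GCE/GKE metadata server) and returns a (credentials, project_id) tuple, which is why every patch in this file sets adc.return_value to a two-element tuple. A minimal standalone version of the pattern:

    from unittest import mock

    import google.auth
    from google.auth import credentials as ga_credentials

    with mock.patch.object(google.auth, "default", autospec=True) as adc:
        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
        creds, project = google.auth.default()
        assert creds is not None and project is None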
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=( 'https://www.googleapis.com/auth/cloud-platform', 'https://www.googleapis.com/auth/cloud-platform.read-only', 'https://www.googleapis.com/auth/logging.admin', 'https://www.googleapis.com/auth/logging.read',), + quota_project_id="octopus", + ) -def test_create_sink_rest_unset_required_fields(): - transport = transports.ConfigServiceV2RestTransport(credentials=ga_credentials.AnonymousCredentials) - unset_fields = transport.create_sink._get_unset_required_fields({}) - assert set(unset_fields) == (set(("uniqueWriterIdentity", )) & set(("parent", "sink", ))) +@pytest.mark.parametrize( + "transport_class", + [ + transports.ConfigServiceV2GrpcTransport, + transports.ConfigServiceV2GrpcAsyncIOTransport, + ], +) +def test_config_service_v2_transport_auth_gdch_credentials(transport_class): + host = 'https://language.com' + api_audience_tests = [None, 'https://language2.com'] + api_audience_expect = [host, 'https://language2.com'] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with( + e + ) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_sink_rest_interceptors(null_interceptor): - transport = transports.ConfigServiceV2RestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ConfigServiceV2RestInterceptor(), +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.ConfigServiceV2GrpcTransport, grpc_helpers), + (transports.ConfigServiceV2GrpcAsyncIOTransport, grpc_helpers_async) + ], +) +def test_config_service_v2_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
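The GDCH test above leans on a mock idiom that is easy to misread: `mock.PropertyMock` must be attached to the *type* of the mock, because Python looks up properties on the class rather than the instance. A self-contained illustration (the attribute name is the only thing borrowed from the test):

```python
from unittest import mock

gdch_mock = mock.MagicMock()
# Attach the property to type(gdch_mock); attaching it to the instance
# would bypass the descriptor protocol and never be invoked.
type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock)

# Attribute access now routes through the PropertyMock.
assert gdch_mock.with_gdch_audience is gdch_mock
```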
+ with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class( + quota_project_id="octopus", + scopes=["1", "2"] ) - client = ConfigServiceV2Client(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.ConfigServiceV2RestInterceptor, "post_create_sink") as post, \ - mock.patch.object(transports.ConfigServiceV2RestInterceptor, "pre_create_sink") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = logging_config.CreateSinkRequest.pb(logging_config.CreateSinkRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = logging_config.LogSink.to_json(logging_config.LogSink()) - - request = logging_config.CreateSinkRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = logging_config.LogSink() - - client.create_sink(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_create_sink_rest_bad_request(transport: str = 'rest', request_type=logging_config.CreateSinkRequest): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2'} - request_init["sink"] = {'name': 'name_value', 'destination': 'destination_value', 'filter': 'filter_value', 'description': 'description_value', 'disabled': True, 'exclusions': [{'name': 'name_value', 'description': 'description_value', 'filter': 'filter_value', 'disabled': True, 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}}], 'output_version_format': 1, 'writer_identity': 'writer_identity_value', 'include_children': True, 'bigquery_options': {'use_partitioned_tables': True, 'uses_timestamp_column_partitioning': True}, 'create_time': {}, 'update_time': {}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
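The `create_channel` assertion just below checks, among other things, that the transport turns off gRPC's default message-size caps. A standalone sketch with plain `grpc` showing what those option tuples mean (the target address is hypothetical):

```python
import grpc

channel = grpc.insecure_channel(
    "localhost:50051",  # hypothetical target
    options=[
        # -1 lifts the size limits (gRPC's default receive cap is 4 MiB).
        ("grpc.max_send_message_length", -1),
        ("grpc.max_receive_message_length", -1),
    ],
)
```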
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.create_sink(request) - -def test_create_sink_rest_flattened(): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) + create_channel.assert_called_with( + "logging.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/cloud-platform.read-only', + 'https://www.googleapis.com/auth/logging.admin', + 'https://www.googleapis.com/auth/logging.read', +), + scopes=["1", "2"], + default_host="logging.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = logging_config.LogSink() - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'sample1/sample2'} +@pytest.mark.parametrize("transport_class", [transports.ConfigServiceV2GrpcTransport, transports.ConfigServiceV2GrpcAsyncIOTransport]) +def test_config_service_v2_grpc_transport_client_cert_source_for_mtls( + transport_class +): + cred = ga_credentials.AnonymousCredentials() - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - sink=logging_config.LogSink(name='name_value'), + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = logging_config.LogSink.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.create_sink(**mock_args) - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{parent=*/*}/sinks" % client.transport._host, args[1]) + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
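The fallback verified below builds channel credentials from the client certificate source, so the call that matters is `grpc.ssl_channel_credentials`. A minimal sketch, assuming certificate and key bytes like those returned by the suite's `client_cert_source_callback`:

```python
import grpc

# Hypothetical PEM payloads; the test callback returns (b"cert bytes", b"key bytes").
cert_bytes, key_bytes = b"cert bytes", b"key bytes"

ssl_creds = grpc.ssl_channel_credentials(
    certificate_chain=cert_bytes,
    private_key=key_bytes,
)
```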
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, + private_key=expected_key + ) -def test_create_sink_rest_flattened_error(transport: str = 'rest'): +@pytest.mark.parametrize("transport_name", [ + "grpc", + "grpc_asyncio", +]) +def test_config_service_v2_host_no_port(transport_name): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + client_options=client_options.ClientOptions(api_endpoint='logging.googleapis.com'), + transport=transport_name, ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_sink( - logging_config.CreateSinkRequest(), - parent='parent_value', - sink=logging_config.LogSink(name='name_value'), - ) - - -def test_create_sink_rest_error(): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' + assert client.transport._host == ( + 'logging.googleapis.com:443' ) - -@pytest.mark.parametrize("request_type", [ - logging_config.UpdateSinkRequest, - dict, +@pytest.mark.parametrize("transport_name", [ + "grpc", + "grpc_asyncio", ]) -def test_update_sink_rest(request_type): +def test_config_service_v2_host_with_port(transport_name): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + client_options=client_options.ClientOptions(api_endpoint='logging.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'logging.googleapis.com:8000' ) - # send a request that will satisfy transcoding - request_init = {'sink_name': 'sample1/sample2/sinks/sample3'} - request_init["sink"] = {'name': 'name_value', 'destination': 'destination_value', 'filter': 'filter_value', 'description': 'description_value', 'disabled': True, 'exclusions': [{'name': 'name_value', 'description': 'description_value', 'filter': 'filter_value', 'disabled': True, 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}}], 'output_version_format': 1, 'writer_identity': 'writer_identity_value', 'include_children': True, 'bigquery_options': {'use_partitioned_tables': True, 'uses_timestamp_column_partitioning': True}, 'create_time': {}, 'update_time': {}} - request = request_type(**request_init) +def test_config_service_v2_grpc_transport_channel(): + channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = logging_config.LogSink( - name='name_value', - destination='destination_value', - filter='filter_value', - description='description_value', - disabled=True, - output_version_format=logging_config.LogSink.VersionFormat.V2, - writer_identity='writer_identity_value', - include_children=True, - ) + # Check that channel is used if provided. 
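The two host tests above pin down the endpoint rule these clients follow: when `api_endpoint` carries no port, the default `:443` is appended; an explicit port is preserved. A minimal sketch of the override mechanism, using only what the tests themselves show:

```python
from google.api_core import client_options

# No port: the transport normalizes this to "logging.googleapis.com:443".
no_port = client_options.ClientOptions(api_endpoint="logging.googleapis.com")

# Explicit port: kept as "logging.googleapis.com:8000".
with_port = client_options.ClientOptions(api_endpoint="logging.googleapis.com:8000")
```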
+    transport = transports.ConfigServiceV2GrpcTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None

-    # Wrap the value into a proper Response obj
-    response_value = Response()
-    response_value.status_code = 200
-    pb_return_value = logging_config.LogSink.pb(return_value)
-    json_return_value = json_format.MessageToJson(pb_return_value)
-    response_value._content = json_return_value.encode('UTF-8')
-    req.return_value = response_value
-    response = client.update_sink(request)
+def test_config_service_v2_grpc_asyncio_transport_channel():
+    channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials())

-    # Establish that the response is the type that we expect.
-    assert isinstance(response, logging_config.LogSink)
-    assert response.name == 'name_value'
-    assert response.destination == 'destination_value'
-    assert response.filter == 'filter_value'
-    assert response.description == 'description_value'
-    assert response.disabled is True
-    assert response.output_version_format == logging_config.LogSink.VersionFormat.V2
-    assert response.writer_identity == 'writer_identity_value'
-    assert response.include_children is True
-
-
-def test_update_sink_rest_required_fields(request_type=logging_config.UpdateSinkRequest):
-    transport_class = transports.ConfigServiceV2RestTransport
-
-    request_init = {}
-    request_init["sink_name"] = ""
-    request = request_type(**request_init)
-    pb_request = request_type.pb(request)
-    jsonified_request = json.loads(json_format.MessageToJson(
-        pb_request,
-        including_default_value_fields=False,
-        use_integers_for_enums=False
-    ))
-
-    # verify fields with default values are dropped
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_sink._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with default values are now present
-
-    jsonified_request["sinkName"] = 'sink_name_value'
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_sink._get_unset_required_fields(jsonified_request)
-    # Check that path parameters and body parameters are not mixing in.
-    assert not set(unset_fields) - set(("unique_writer_identity", "update_mask", ))
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with non-default values are left alone
-    assert "sinkName" in jsonified_request
-    assert jsonified_request["sinkName"] == 'sink_name_value'
-
-    client = ConfigServiceV2Client(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='rest',
+    # Check that channel is used if provided.
+    transport = transports.ConfigServiceV2GrpcAsyncIOTransport(
+        host="squid.clam.whelk",
+        channel=channel,
     )
-    request = request_type(**request_init)
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None

-    # Designate an appropriate value for the returned response.
-    return_value = logging_config.LogSink()
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # We need to mock transcode() because providing default values
-        # for required fields will fail the real version if the http_options
-        # expect actual values for those fields.
- with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "put", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - response_value = Response() - response_value.status_code = 200 +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize("transport_class", [transports.ConfigServiceV2GrpcTransport, transports.ConfigServiceV2GrpcAsyncIOTransport]) +def test_config_service_v2_transport_channel_mtls_with_client_cert_source( + transport_class +): + with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred - pb_return_value = logging_config.LogSink.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() - response = client.update_sink(request) + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
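The ADC-mTLS test that follows uses `mock.patch.multiple` to patch several attributes of one target in a single context manager; note that `mock.PropertyMock` works as a plain replacement value here because it is a descriptor. A generic illustration (the `Widget` class is invented for the example):

```python
from unittest import mock


class Widget:
    def __init__(self):
        self.ready = True

    @property
    def value(self):
        return 42


with mock.patch.multiple(
    Widget,
    __init__=mock.Mock(return_value=None),    # constructor becomes a no-op
    value=mock.PropertyMock(return_value=7),  # property yields the stub value
):
    w = Widget()
    assert w.value == 7
```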
+@pytest.mark.parametrize("transport_class", [transports.ConfigServiceV2GrpcTransport, transports.ConfigServiceV2GrpcAsyncIOTransport]) +def test_config_service_v2_transport_channel_mtls_with_adc( + transport_class +): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() -def test_update_sink_rest_unset_required_fields(): - transport = transports.ConfigServiceV2RestTransport(credentials=ga_credentials.AnonymousCredentials) + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) - unset_fields = transport.update_sink._get_unset_required_fields({}) - assert set(unset_fields) == (set(("uniqueWriterIdentity", "updateMask", )) & set(("sinkName", "sink", ))) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_sink_rest_interceptors(null_interceptor): - transport = transports.ConfigServiceV2RestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ConfigServiceV2RestInterceptor(), - ) - client = ConfigServiceV2Client(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.ConfigServiceV2RestInterceptor, "post_update_sink") as post, \ - mock.patch.object(transports.ConfigServiceV2RestInterceptor, "pre_update_sink") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = logging_config.UpdateSinkRequest.pb(logging_config.UpdateSinkRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = logging_config.LogSink.to_json(logging_config.LogSink()) - - request = logging_config.UpdateSinkRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = logging_config.LogSink() - - client.update_sink(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_update_sink_rest_bad_request(transport: str = 'rest', request_type=logging_config.UpdateSinkRequest): +def test_config_service_v2_grpc_lro_client(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport='grpc', ) + transport = client.transport - # send a request that will satisfy transcoding - request_init = {'sink_name': 'sample1/sample2/sinks/sample3'} - request_init["sink"] = {'name': 'name_value', 'destination': 'destination_value', 
'filter': 'filter_value', 'description': 'description_value', 'disabled': True, 'exclusions': [{'name': 'name_value', 'description': 'description_value', 'filter': 'filter_value', 'disabled': True, 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}}], 'output_version_format': 1, 'writer_identity': 'writer_identity_value', 'include_children': True, 'bigquery_options': {'use_partitioned_tables': True, 'uses_timestamp_column_partitioning': True}, 'create_time': {}, 'update_time': {}}
-    request = request_type(**request_init)
+    # Ensure that we have an api-core operations client.
+    assert isinstance(
+        transport.operations_client,
+        operations_v1.OperationsClient,
+    )

-    # Mock the http request call within the method and fake a BadRequest error.
-    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 400
-        response_value.request = Request()
-        req.return_value = response_value
-        client.update_sink(request)
+    # Ensure that subsequent calls to the property return the exact same object.
+    assert transport.operations_client is transport.operations_client

-def test_update_sink_rest_flattened():
-    client = ConfigServiceV2Client(
+def test_config_service_v2_grpc_lro_async_client():
+    client = ConfigServiceV2AsyncClient(
         credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
+        transport='grpc_asyncio',
     )
+    transport = client.transport

-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = logging_config.LogSink()
-
-        # get arguments that satisfy an http rule for this method
-        sample_request = {'sink_name': 'sample1/sample2/sinks/sample3'}
+    # Ensure that we have an api-core operations client.
+    assert isinstance(
+        transport.operations_client,
+        operations_v1.OperationsAsyncClient,
+    )

-        # get truthy value for each flattened field
-        mock_args = dict(
-            sink_name='sink_name_value',
-            sink=logging_config.LogSink(name='name_value'),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
-        )
-        mock_args.update(sample_request)
+    # Ensure that subsequent calls to the property return the exact same object.
+    assert transport.operations_client is transport.operations_client

-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        pb_return_value = logging_config.LogSink.pb(return_value)
-        json_return_value = json_format.MessageToJson(pb_return_value)
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value

-        client.update_sink(**mock_args)
+def test_cmek_settings_path():
+    project = "squid"
+    expected = "projects/{project}/cmekSettings".format(project=project, )
+    actual = ConfigServiceV2Client.cmek_settings_path(project)
+    assert expected == actual

-        # Establish that the underlying call was made with the expected
-        # request object values.
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{sink_name=*/*/sinks/*}" % client.transport._host, args[1]) +def test_parse_cmek_settings_path(): + expected = { + "project": "clam", + } + path = ConfigServiceV2Client.cmek_settings_path(**expected) -def test_update_sink_rest_flattened_error(transport: str = 'rest'): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) + # Check that the path construction is reversible. + actual = ConfigServiceV2Client.parse_cmek_settings_path(path) + assert expected == actual - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_sink( - logging_config.UpdateSinkRequest(), - sink_name='sink_name_value', - sink=logging_config.LogSink(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) +def test_link_path(): + project = "whelk" + location = "octopus" + bucket = "oyster" + link = "nudibranch" + expected = "projects/{project}/locations/{location}/buckets/{bucket}/links/{link}".format(project=project, location=location, bucket=bucket, link=link, ) + actual = ConfigServiceV2Client.link_path(project, location, bucket, link) + assert expected == actual -def test_update_sink_rest_error(): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) +def test_parse_link_path(): + expected = { + "project": "cuttlefish", + "location": "mussel", + "bucket": "winkle", + "link": "nautilus", + } + path = ConfigServiceV2Client.link_path(**expected) + # Check that the path construction is reversible. + actual = ConfigServiceV2Client.parse_link_path(path) + assert expected == actual -@pytest.mark.parametrize("request_type", [ - logging_config.DeleteSinkRequest, - dict, -]) -def test_delete_sink_rest(request_type): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) +def test_log_bucket_path(): + project = "scallop" + location = "abalone" + bucket = "squid" + expected = "projects/{project}/locations/{location}/buckets/{bucket}".format(project=project, location=location, bucket=bucket, ) + actual = ConfigServiceV2Client.log_bucket_path(project, location, bucket) + assert expected == actual - # send a request that will satisfy transcoding - request_init = {'sink_name': 'sample1/sample2/sinks/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None +def test_parse_log_bucket_path(): + expected = { + "project": "clam", + "location": "whelk", + "bucket": "octopus", + } + path = ConfigServiceV2Client.log_bucket_path(**expected) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' + # Check that the path construction is reversible. 
+ actual = ConfigServiceV2Client.parse_log_bucket_path(path) + assert expected == actual - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.delete_sink(request) +def test_log_exclusion_path(): + project = "oyster" + exclusion = "nudibranch" + expected = "projects/{project}/exclusions/{exclusion}".format(project=project, exclusion=exclusion, ) + actual = ConfigServiceV2Client.log_exclusion_path(project, exclusion) + assert expected == actual - # Establish that the response is the type that we expect. - assert response is None +def test_parse_log_exclusion_path(): + expected = { + "project": "cuttlefish", + "exclusion": "mussel", + } + path = ConfigServiceV2Client.log_exclusion_path(**expected) -def test_delete_sink_rest_required_fields(request_type=logging_config.DeleteSinkRequest): - transport_class = transports.ConfigServiceV2RestTransport + # Check that the path construction is reversible. + actual = ConfigServiceV2Client.parse_log_exclusion_path(path) + assert expected == actual - request_init = {} - request_init["sink_name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) +def test_log_sink_path(): + project = "winkle" + sink = "nautilus" + expected = "projects/{project}/sinks/{sink}".format(project=project, sink=sink, ) + actual = ConfigServiceV2Client.log_sink_path(project, sink) + assert expected == actual - # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_sink._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) +def test_parse_log_sink_path(): + expected = { + "project": "scallop", + "sink": "abalone", + } + path = ConfigServiceV2Client.log_sink_path(**expected) - # verify required fields with default values are now present + # Check that the path construction is reversible. + actual = ConfigServiceV2Client.parse_log_sink_path(path) + assert expected == actual - jsonified_request["sinkName"] = 'sink_name_value' +def test_log_view_path(): + project = "squid" + location = "clam" + bucket = "whelk" + view = "octopus" + expected = "projects/{project}/locations/{location}/buckets/{bucket}/views/{view}".format(project=project, location=location, bucket=bucket, view=view, ) + actual = ConfigServiceV2Client.log_view_path(project, location, bucket, view) + assert expected == actual - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_sink._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - # verify required fields with non-default values are left alone - assert "sinkName" in jsonified_request - assert jsonified_request["sinkName"] == 'sink_name_value' +def test_parse_log_view_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + "bucket": "cuttlefish", + "view": "mussel", + } + path = ConfigServiceV2Client.log_view_path(**expected) - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) + # Check that the path construction is reversible. + actual = ConfigServiceV2Client.parse_log_view_path(path) + assert expected == actual - # Designate an appropriate value for the returned response. 
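Every `*_path` / `parse_*_path` pair in this stretch exercises the same mechanism: a classmethod formats field values into a resource-name template, and a companion parser inverts it. A minimal sketch of the round trip, using the sink template the tests assert against (field values here are illustrative):

```python
import re

template = "projects/{project}/sinks/{sink}"

# Build: substitute field values into the template.
path = template.format(project="my-project", sink="my-sink")
assert path == "projects/my-project/sinks/my-sink"

# Parse: recover the fields; a non-greedy regex suffices for this template.
match = re.match(r"^projects/(?P<project>.+?)/sinks/(?P<sink>.+?)$", path)
assert match.groupdict() == {"project": "my-project", "sink": "my-sink"}
```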
- return_value = None - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "delete", - 'query_params': pb_request, - } - transcode.return_value = transcode_result +def test_settings_path(): + project = "winkle" + expected = "projects/{project}/settings".format(project=project, ) + actual = ConfigServiceV2Client.settings_path(project) + assert expected == actual - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value +def test_parse_settings_path(): + expected = { + "project": "nautilus", + } + path = ConfigServiceV2Client.settings_path(**expected) - response = client.delete_sink(request) + # Check that the path construction is reversible. + actual = ConfigServiceV2Client.parse_settings_path(path) + assert expected == actual - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params +def test_common_billing_account_path(): + billing_account = "scallop" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = ConfigServiceV2Client.common_billing_account_path(billing_account) + assert expected == actual -def test_delete_sink_rest_unset_required_fields(): - transport = transports.ConfigServiceV2RestTransport(credentials=ga_credentials.AnonymousCredentials) +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "abalone", + } + path = ConfigServiceV2Client.common_billing_account_path(**expected) - unset_fields = transport.delete_sink._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("sinkName", ))) + # Check that the path construction is reversible. 
+ actual = ConfigServiceV2Client.parse_common_billing_account_path(path) + assert expected == actual +def test_common_folder_path(): + folder = "squid" + expected = "folders/{folder}".format(folder=folder, ) + actual = ConfigServiceV2Client.common_folder_path(folder) + assert expected == actual -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_sink_rest_interceptors(null_interceptor): - transport = transports.ConfigServiceV2RestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ConfigServiceV2RestInterceptor(), - ) - client = ConfigServiceV2Client(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.ConfigServiceV2RestInterceptor, "pre_delete_sink") as pre: - pre.assert_not_called() - pb_message = logging_config.DeleteSinkRequest.pb(logging_config.DeleteSinkRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() +def test_parse_common_folder_path(): + expected = { + "folder": "clam", + } + path = ConfigServiceV2Client.common_folder_path(**expected) - request = logging_config.DeleteSinkRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata + # Check that the path construction is reversible. + actual = ConfigServiceV2Client.parse_common_folder_path(path) + assert expected == actual - client.delete_sink(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) +def test_common_organization_path(): + organization = "whelk" + expected = "organizations/{organization}".format(organization=organization, ) + actual = ConfigServiceV2Client.common_organization_path(organization) + assert expected == actual - pre.assert_called_once() +def test_parse_common_organization_path(): + expected = { + "organization": "octopus", + } + path = ConfigServiceV2Client.common_organization_path(**expected) -def test_delete_sink_rest_bad_request(transport: str = 'rest', request_type=logging_config.DeleteSinkRequest): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'sink_name': 'sample1/sample2/sinks/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_sink(request) + # Check that the path construction is reversible. + actual = ConfigServiceV2Client.parse_common_organization_path(path) + assert expected == actual +def test_common_project_path(): + project = "oyster" + expected = "projects/{project}".format(project=project, ) + actual = ConfigServiceV2Client.common_project_path(project) + assert expected == actual -def test_delete_sink_rest_flattened(): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None +def test_parse_common_project_path(): + expected = { + "project": "nudibranch", + } + path = ConfigServiceV2Client.common_project_path(**expected) - # get arguments that satisfy an http rule for this method - sample_request = {'sink_name': 'sample1/sample2/sinks/sample3'} + # Check that the path construction is reversible. + actual = ConfigServiceV2Client.parse_common_project_path(path) + assert expected == actual - # get truthy value for each flattened field - mock_args = dict( - sink_name='sink_name_value', - ) - mock_args.update(sample_request) +def test_common_location_path(): + project = "cuttlefish" + location = "mussel" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = ConfigServiceV2Client.common_location_path(project, location) + assert expected == actual - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - client.delete_sink(**mock_args) +def test_parse_common_location_path(): + expected = { + "project": "winkle", + "location": "nautilus", + } + path = ConfigServiceV2Client.common_location_path(**expected) - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{sink_name=*/*/sinks/*}" % client.transport._host, args[1]) + # Check that the path construction is reversible. + actual = ConfigServiceV2Client.parse_common_location_path(path) + assert expected == actual -def test_delete_sink_rest_flattened_error(transport: str = 'rest'): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() - # Attempting to call a method with both a request object and flattened - # fields is an error. 
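`test_client_with_default_client_info`, which begins above and continues below, checks that a `ClientInfo` object reaches `_prep_wrapped_messages` on both the client and the transport. For context, `ClientInfo` carries user-agent metadata; a minimal sketch (the version string is hypothetical):

```python
from google.api_core import gapic_v1

client_info = gapic_v1.client_info.ClientInfo(
    client_library_version="0.0.0",  # hypothetical version
)
# The metadata is ultimately serialized into the request's user-agent header.
print(client_info.to_user_agent())
```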
- with pytest.raises(ValueError): - client.delete_sink( - logging_config.DeleteSinkRequest(), - sink_name='sink_name_value', + with mock.patch.object(transports.ConfigServiceV2Transport, '_prep_wrapped_messages') as prep: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, ) + prep.assert_called_once_with(client_info) + with mock.patch.object(transports.ConfigServiceV2Transport, '_prep_wrapped_messages') as prep: + transport_class = ConfigServiceV2Client.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) -def test_delete_sink_rest_error(): - client = ConfigServiceV2Client( +@pytest.mark.asyncio +async def test_transport_close_async(): + client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest' + transport="grpc_asyncio", ) + with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: + async with client: + close.assert_not_called() + close.assert_called_once() -@pytest.mark.parametrize("request_type", [ - logging_config.ListExclusionsRequest, - dict, -]) -def test_list_exclusions_rest(request_type): +def test_cancel_operation(transport: str = "grpc"): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = logging_config.ListExclusionsResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = logging_config.ListExclusionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.list_exclusions(request) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request # Establish that the response is the type that we expect. 
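The operation tests in this region all stub the RPC with `mock.patch.object(type(...), "__call__")`: dunder methods are looked up on the type, not the instance, so patching the instance attribute would never intercept the call. A generic demonstration with an invented callable wrapper:

```python
from unittest import mock


class CallableWrapper:
    """Invented stand-in for a gRPC method wrapper."""

    def __call__(self, request):
        raise RuntimeError("would hit the network")


stub = CallableWrapper()
# Patching "__call__" on the instance would be ignored; it must go on the type.
with mock.patch.object(type(stub), "__call__", return_value=None) as call:
    assert stub(request="req") is None
    call.assert_called_once()
```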
- assert isinstance(response, pagers.ListExclusionsPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_exclusions_rest_required_fields(request_type=logging_config.ListExclusionsRequest): - transport_class = transports.ConfigServiceV2RestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_exclusions._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_exclusions._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("page_size", "page_token", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + assert response is None +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc"): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = logging_config.ListExclusionsResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = logging_config.ListExclusionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.list_exclusions(request) - - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_list_exclusions_rest_unset_required_fields(): - transport = transports.ConfigServiceV2RestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.list_exclusions._get_unset_required_fields({}) - assert set(unset_fields) == (set(("pageSize", "pageToken", )) & set(("parent", ))) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = operations_pb2.CancelOperationRequest() -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_exclusions_rest_interceptors(null_interceptor): - transport = transports.ConfigServiceV2RestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ConfigServiceV2RestInterceptor(), + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None ) - client = ConfigServiceV2Client(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.ConfigServiceV2RestInterceptor, "post_list_exclusions") as post, \ - mock.patch.object(transports.ConfigServiceV2RestInterceptor, "pre_list_exclusions") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = logging_config.ListExclusionsRequest.pb(logging_config.ListExclusionsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = logging_config.ListExclusionsResponse.to_json(logging_config.ListExclusionsResponse()) - - request = logging_config.ListExclusionsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = logging_config.ListExclusionsResponse() - - client.list_exclusions(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_list_exclusions_rest_bad_request(transport: str = 'rest', request_type=logging_config.ListExclusionsRequest): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_exclusions(request) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + # Establish that the response is the type that we expect. + assert response is None -def test_list_exclusions_rest_flattened(): +def test_cancel_operation_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = logging_config.ListExclusionsResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'sample1/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = logging_config.ListExclusionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.list_exclusions(**mock_args) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{parent=*/*}/exclusions" % client.transport._host, args[1]) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request -def test_list_exclusions_rest_flattened_error(transport: str = 'rest'): - client = ConfigServiceV2Client( + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_exclusions( - logging_config.ListExclusionsRequest(), - parent='parent_value', - ) - - -def test_list_exclusions_rest_pager(transport: str = 'rest'): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - logging_config.ListExclusionsResponse( - exclusions=[ - logging_config.LogExclusion(), - logging_config.LogExclusion(), - logging_config.LogExclusion(), - ], - next_page_token='abc', - ), - logging_config.ListExclusionsResponse( - exclusions=[], - next_page_token='def', - ), - logging_config.ListExclusionsResponse( - exclusions=[ - logging_config.LogExclusion(), - ], - next_page_token='ghi', - ), - logging_config.ListExclusionsResponse( - exclusions=[ - logging_config.LogExclusion(), - logging_config.LogExclusion(), - ], - ), + # Mock the actual call within the gRPC stub, and fake the request. 
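These field-header tests assert on gRPC metadata rather than a URL: for gRPC transports, routing fields travel in an `x-goog-request-params` metadata entry encoded as `key=value` pairs. A sketch of the exact pair the assertions above and below expect:

```python
# The request sets name="locations", so the client is expected to attach
# the following metadata tuple to the call:
expected_metadata = ("x-goog-request-params", "name=locations")

# In the tests this is checked against the mocked call's keyword arguments:
#     assert expected_metadata in kw["metadata"]
```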
+ with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(logging_config.ListExclusionsResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'parent': 'sample1/sample2'} - - pager = client.list_exclusions(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, logging_config.LogExclusion) - for i in results) - - pages = list(client.list_exclusions(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.parametrize("request_type", [ - logging_config.GetExclusionRequest, - dict, -]) -def test_get_exclusion_rest(request_type): +def test_cancel_operation_from_dict(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None - # send a request that will satisfy transcoding - request_init = {'name': 'sample1/sample2/exclusions/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = logging_config.LogExclusion( - name='name_value', - description='description_value', - filter='filter_value', - disabled=True, + response = client.cancel_operation( + request={ + "name": "locations", + } ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = logging_config.LogExclusion.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.get_exclusion(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, logging_config.LogExclusion) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.filter == 'filter_value' - assert response.disabled is True - - -def test_get_exclusion_rest_required_fields(request_type=logging_config.GetExclusionRequest): - transport_class = transports.ConfigServiceV2RestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_exclusion._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_exclusion._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = ConfigServiceV2Client( + call.assert_called() +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = logging_config.LogExclusion() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = logging_config.LogExclusion.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.get_exclusion(request) - - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_get_exclusion_rest_unset_required_fields(): - transport = transports.ConfigServiceV2RestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.get_exclusion._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_exclusion_rest_interceptors(null_interceptor): - transport = transports.ConfigServiceV2RestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ConfigServiceV2RestInterceptor(), - ) - client = ConfigServiceV2Client(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.ConfigServiceV2RestInterceptor, "post_get_exclusion") as post, \ - mock.patch.object(transports.ConfigServiceV2RestInterceptor, "pre_get_exclusion") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = logging_config.GetExclusionRequest.pb(logging_config.GetExclusionRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = logging_config.LogExclusion.to_json(logging_config.LogExclusion()) - - request = logging_config.GetExclusionRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = logging_config.LogExclusion() - - client.get_exclusion(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_exclusion_rest_bad_request(transport: str = 'rest', request_type=logging_config.GetExclusionRequest): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'sample1/sample2/exclusions/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_exclusion(request) - - -def test_get_exclusion_rest_flattened(): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. 
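The *_rest_interceptors tests above drive the pre_/post_ hook pair with mocks; in real use those hooks come from a ConfigServiceV2RestInterceptor subclass handed to the transport. A minimal sketch (the hook bodies are illustrative; the method names are exactly the ones the test patches):

from google.auth import credentials as ga_credentials
from google.cloud.logging_v2.services.config_service_v2 import transports

class LoggingInterceptor(transports.ConfigServiceV2RestInterceptor):
    def pre_get_exclusion(self, request, metadata):
        # Runs before transcoding; may rewrite the request or metadata.
        return request, metadata

    def post_get_exclusion(self, response):
        # Runs after the LogExclusion is parsed from the HTTP body.
        return response

transport = transports.ConfigServiceV2RestTransport(
    credentials=ga_credentials.AnonymousCredentials(),
    interceptor=LoggingInterceptor(),
)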
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = logging_config.LogExclusion() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'sample1/sample2/exclusions/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = logging_config.LogExclusion.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.get_exclusion(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{name=*/*/exclusions/*}" % client.transport._host, args[1]) - - -def test_get_exclusion_rest_flattened_error(transport: str = 'rest'): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_exclusion( - logging_config.GetExclusionRequest(), - name='name_value', - ) - - -def test_get_exclusion_rest_error(): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - logging_config.CreateExclusionRequest, - dict, -]) -def test_create_exclusion_rest(request_type): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2'} - request_init["exclusion"] = {'name': 'name_value', 'description': 'description_value', 'filter': 'filter_value', 'disabled': True, 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = logging_config.LogExclusion( - name='name_value', - description='description_value', - filter='filter_value', - disabled=True, - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = logging_config.LogExclusion.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.create_exclusion(request) - - # Establish that the response is the type that we expect. 
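The *_flattened and *_flattened_error tests above pin down the calling convention: flattened keyword arguments are sugar for building the request message, and combining them with an explicit request object fails fast, before any I/O. A self-contained sketch:

import pytest

from google.auth import credentials as ga_credentials
from google.cloud.logging_v2.services.config_service_v2 import ConfigServiceV2Client
from google.cloud.logging_v2.types import logging_config

client = ConfigServiceV2Client(
    credentials=ga_credentials.AnonymousCredentials(), transport="rest"
)
# Passing both a request object and flattened fields is rejected up front.
with pytest.raises(ValueError):
    client.create_exclusion(
        logging_config.CreateExclusionRequest(),
        parent="parent_value",
    )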
- assert isinstance(response, logging_config.LogExclusion) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.filter == 'filter_value' - assert response.disabled is True - - -def test_create_exclusion_rest_required_fields(request_type=logging_config.CreateExclusionRequest): - transport_class = transports.ConfigServiceV2RestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_exclusion._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_exclusion._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = logging_config.LogExclusion() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = logging_config.LogExclusion.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.create_exclusion(request) - - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_create_exclusion_rest_unset_required_fields(): - transport = transports.ConfigServiceV2RestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.create_exclusion._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("parent", "exclusion", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_exclusion_rest_interceptors(null_interceptor): - transport = transports.ConfigServiceV2RestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ConfigServiceV2RestInterceptor(), - ) - client = ConfigServiceV2Client(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.ConfigServiceV2RestInterceptor, "post_create_exclusion") as post, \ - mock.patch.object(transports.ConfigServiceV2RestInterceptor, "pre_create_exclusion") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = logging_config.CreateExclusionRequest.pb(logging_config.CreateExclusionRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = logging_config.LogExclusion.to_json(logging_config.LogExclusion()) - - request = logging_config.CreateExclusionRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = logging_config.LogExclusion() - - client.create_exclusion(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_create_exclusion_rest_bad_request(transport: str = 'rest', request_type=logging_config.CreateExclusionRequest): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2'} - request_init["exclusion"] = {'name': 'name_value', 'description': 'description_value', 'filter': 'filter_value', 'disabled': True, 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
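These required-fields tests stub google.api_core.path_template.transcode rather than run it, since the stand-in request carries only default field values. The stub mirrors the real contract: transcode returns a dict of the bound URI, the HTTP verb, the leftover query params, and (only for rules that declare one) a body. Schematically, with pb_request being the request proto from the surrounding test:

transcode_result = {
    "uri": "v1/sample_method",    # path after binding {field=...} params
    "method": "post",             # HTTP verb from the matching http rule
    "query_params": pb_request,   # fields bound to neither path nor body
    "body": pb_request,           # present only when the rule has a body
}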
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.create_exclusion(request) - - -def test_create_exclusion_rest_flattened(): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = logging_config.LogExclusion() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'sample1/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - exclusion=logging_config.LogExclusion(name='name_value'), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = logging_config.LogExclusion.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.create_exclusion(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{parent=*/*}/exclusions" % client.transport._host, args[1]) - - -def test_create_exclusion_rest_flattened_error(transport: str = 'rest'): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_exclusion( - logging_config.CreateExclusionRequest(), - parent='parent_value', - exclusion=logging_config.LogExclusion(name='name_value'), - ) - - -def test_create_exclusion_rest_error(): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - logging_config.UpdateExclusionRequest, - dict, -]) -def test_update_exclusion_rest(request_type): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'sample1/sample2/exclusions/sample3'} - request_init["exclusion"] = {'name': 'name_value', 'description': 'description_value', 'filter': 'filter_value', 'disabled': True, 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = logging_config.LogExclusion( - name='name_value', - description='description_value', - filter='filter_value', - disabled=True, - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = logging_config.LogExclusion.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.update_exclusion(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, logging_config.LogExclusion) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.filter == 'filter_value' - assert response.disabled is True - - -def test_update_exclusion_rest_required_fields(request_type=logging_config.UpdateExclusionRequest): - transport_class = transports.ConfigServiceV2RestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_exclusion._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_exclusion._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("update_mask", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = logging_config.LogExclusion() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
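The "fields with default values are dropped" step above works because proto3 scalar defaults simply vanish from the JSON view of a message. A small check of that behavior:

import json

from google.cloud.logging_v2.types import logging_config
from google.protobuf import json_format

pb = logging_config.UpdateExclusionRequest.pb(
    logging_config.UpdateExclusionRequest(name="")
)
jsonified = json.loads(
    json_format.MessageToJson(pb, including_default_value_fields=False)
)
assert "name" not in jsonified  # "" is the proto3 default, so it was dropped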
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "patch", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = logging_config.LogExclusion.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.update_exclusion(request) - - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_update_exclusion_rest_unset_required_fields(): - transport = transports.ConfigServiceV2RestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.update_exclusion._get_unset_required_fields({}) - assert set(unset_fields) == (set(("updateMask", )) & set(("name", "exclusion", "updateMask", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_exclusion_rest_interceptors(null_interceptor): - transport = transports.ConfigServiceV2RestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ConfigServiceV2RestInterceptor(), - ) - client = ConfigServiceV2Client(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.ConfigServiceV2RestInterceptor, "post_update_exclusion") as post, \ - mock.patch.object(transports.ConfigServiceV2RestInterceptor, "pre_update_exclusion") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = logging_config.UpdateExclusionRequest.pb(logging_config.UpdateExclusionRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = logging_config.LogExclusion.to_json(logging_config.LogExclusion()) - - request = logging_config.UpdateExclusionRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = logging_config.LogExclusion() - - client.update_exclusion(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_update_exclusion_rest_bad_request(transport: str = 'rest', request_type=logging_config.UpdateExclusionRequest): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'sample1/sample2/exclusions/sample3'} - request_init["exclusion"] = {'name': 'name_value', 'description': 'description_value', 'filter': 'filter_value', 'disabled': True, 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.update_exclusion(request) - - -def test_update_exclusion_rest_flattened(): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = logging_config.LogExclusion() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'sample1/sample2/exclusions/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - exclusion=logging_config.LogExclusion(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = logging_config.LogExclusion.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.update_exclusion(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{name=*/*/exclusions/*}" % client.transport._host, args[1]) - - -def test_update_exclusion_rest_flattened_error(transport: str = 'rest'): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_exclusion( - logging_config.UpdateExclusionRequest(), - name='name_value', - exclusion=logging_config.LogExclusion(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -def test_update_exclusion_rest_error(): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - logging_config.DeleteExclusionRequest, - dict, -]) -def test_delete_exclusion_rest(request_type): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'sample1/sample2/exclusions/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.delete_exclusion(request) - - # Establish that the response is the type that we expect. 
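The flattened update_exclusion call above pairs the replacement values with a FieldMask naming exactly which fields the server may touch; anything in `exclusion` outside the mask is ignored. An illustrative call, assuming a session mock like the one sketched after the get_exclusion test:

from google.protobuf import field_mask_pb2

client.update_exclusion(
    name="projects/p/exclusions/e",
    exclusion=logging_config.LogExclusion(filter="severity>=ERROR"),
    update_mask=field_mask_pb2.FieldMask(paths=["filter"]),
)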
- assert response is None - - -def test_delete_exclusion_rest_required_fields(request_type=logging_config.DeleteExclusionRequest): - transport_class = transports.ConfigServiceV2RestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_exclusion._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_exclusion._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "delete", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.delete_exclusion(request) - - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_delete_exclusion_rest_unset_required_fields(): - transport = transports.ConfigServiceV2RestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.delete_exclusion._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_exclusion_rest_interceptors(null_interceptor): - transport = transports.ConfigServiceV2RestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ConfigServiceV2RestInterceptor(), - ) - client = ConfigServiceV2Client(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.ConfigServiceV2RestInterceptor, "pre_delete_exclusion") as pre: - pre.assert_not_called() - pb_message = logging_config.DeleteExclusionRequest.pb(logging_config.DeleteExclusionRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - - request = logging_config.DeleteExclusionRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - - client.delete_exclusion(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - - -def test_delete_exclusion_rest_bad_request(transport: str = 'rest', request_type=logging_config.DeleteExclusionRequest): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'sample1/sample2/exclusions/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_exclusion(request) - - -def test_delete_exclusion_rest_flattened(): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
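delete_exclusion maps to google.protobuf.Empty, so the faked HTTP body is just '' and the client returns None; that also appears to be why the interceptor test above patches only pre_delete_exclusion, as there is no payload for a post_ hook to transform. A sketch, reusing the client and imports from the earlier session-mock example:

with mock.patch.object(type(client.transport._session), "request") as req:
    response_value = Response()
    response_value.status_code = 200
    response_value._content = b""   # Empty message: no JSON body at all
    req.return_value = response_value
    assert client.delete_exclusion(request={"name": "projects/p/exclusions/e"}) is None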
- return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'sample1/sample2/exclusions/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.delete_exclusion(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{name=*/*/exclusions/*}" % client.transport._host, args[1]) - - -def test_delete_exclusion_rest_flattened_error(transport: str = 'rest'): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_exclusion( - logging_config.DeleteExclusionRequest(), - name='name_value', - ) - - -def test_delete_exclusion_rest_error(): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - logging_config.GetCmekSettingsRequest, - dict, -]) -def test_get_cmek_settings_rest(request_type): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'sample1/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = logging_config.CmekSettings( - name='name_value', - kms_key_name='kms_key_name_value', - service_account_id='service_account_id_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = logging_config.CmekSettings.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.get_cmek_settings(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, logging_config.CmekSettings) - assert response.name == 'name_value' - assert response.kms_key_name == 'kms_key_name_value' - assert response.service_account_id == 'service_account_id_value' - - -def test_get_cmek_settings_rest_required_fields(request_type=logging_config.GetCmekSettingsRequest): - transport_class = transports.ConfigServiceV2RestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_cmek_settings._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_cmek_settings._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = logging_config.CmekSettings() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = logging_config.CmekSettings.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.get_cmek_settings(request) - - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_get_cmek_settings_rest_unset_required_fields(): - transport = transports.ConfigServiceV2RestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.get_cmek_settings._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_cmek_settings_rest_interceptors(null_interceptor): - transport = transports.ConfigServiceV2RestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ConfigServiceV2RestInterceptor(), - ) - client = ConfigServiceV2Client(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.ConfigServiceV2RestInterceptor, "post_get_cmek_settings") as post, \ - mock.patch.object(transports.ConfigServiceV2RestInterceptor, "pre_get_cmek_settings") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = logging_config.GetCmekSettingsRequest.pb(logging_config.GetCmekSettingsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = logging_config.CmekSettings.to_json(logging_config.CmekSettings()) - - request = logging_config.GetCmekSettingsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = logging_config.CmekSettings() - - client.get_cmek_settings(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_cmek_settings_rest_bad_request(transport: str = 'rest', request_type=logging_config.GetCmekSettingsRequest): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'sample1/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_cmek_settings(request) - - -def test_get_cmek_settings_rest_error(): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - logging_config.UpdateCmekSettingsRequest, - dict, -]) -def test_update_cmek_settings_rest(request_type): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'sample1/sample2'} - request_init["cmek_settings"] = {'name': 'name_value', 'kms_key_name': 'kms_key_name_value', 'service_account_id': 'service_account_id_value'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = logging_config.CmekSettings( - name='name_value', - kms_key_name='kms_key_name_value', - service_account_id='service_account_id_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = logging_config.CmekSettings.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.update_cmek_settings(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, logging_config.CmekSettings) - assert response.name == 'name_value' - assert response.kms_key_name == 'kms_key_name_value' - assert response.service_account_id == 'service_account_id_value' - - -def test_update_cmek_settings_rest_required_fields(request_type=logging_config.UpdateCmekSettingsRequest): - transport_class = transports.ConfigServiceV2RestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_cmek_settings._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_cmek_settings._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
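The request_init dicts above lean on a proto-plus convenience: nested message fields accept plain dicts, which the wrapper coerces into the right message type. A small demonstration (the resource name is illustrative):

from google.cloud.logging_v2.types import logging_config

request = logging_config.UpdateCmekSettingsRequest(
    name="organizations/256",
    cmek_settings={"kms_key_name": "kms_key_name_value"},
)
assert request.cmek_settings.kms_key_name == "kms_key_name_value"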
- assert not set(unset_fields) - set(("update_mask", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = logging_config.CmekSettings() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "patch", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = logging_config.CmekSettings.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.update_cmek_settings(request) - - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_update_cmek_settings_rest_unset_required_fields(): - transport = transports.ConfigServiceV2RestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.update_cmek_settings._get_unset_required_fields({}) - assert set(unset_fields) == (set(("updateMask", )) & set(("name", "cmekSettings", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_cmek_settings_rest_interceptors(null_interceptor): - transport = transports.ConfigServiceV2RestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ConfigServiceV2RestInterceptor(), - ) - client = ConfigServiceV2Client(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.ConfigServiceV2RestInterceptor, "post_update_cmek_settings") as post, \ - mock.patch.object(transports.ConfigServiceV2RestInterceptor, "pre_update_cmek_settings") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = logging_config.UpdateCmekSettingsRequest.pb(logging_config.UpdateCmekSettingsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = logging_config.CmekSettings.to_json(logging_config.CmekSettings()) - - request = logging_config.UpdateCmekSettingsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = logging_config.CmekSettings() - - client.update_cmek_settings(request, metadata=[("key", "val"), 
("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_update_cmek_settings_rest_bad_request(transport: str = 'rest', request_type=logging_config.UpdateCmekSettingsRequest): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'sample1/sample2'} - request_init["cmek_settings"] = {'name': 'name_value', 'kms_key_name': 'kms_key_name_value', 'service_account_id': 'service_account_id_value'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.update_cmek_settings(request) - - -def test_update_cmek_settings_rest_error(): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.ConfigServiceV2GrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. - transport = transports.ConfigServiceV2GrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = ConfigServiceV2Client( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide an api_key and a transport instance. - transport = transports.ConfigServiceV2GrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = ConfigServiceV2Client( - client_options=options, - transport=transport, - ) - - # It is an error to provide an api_key and a credential. - options = mock.Mock() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = ConfigServiceV2Client( - client_options=options, - credentials=ga_credentials.AnonymousCredentials() - ) - - # It is an error to provide scopes and a transport instance. - transport = transports.ConfigServiceV2GrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = ConfigServiceV2Client( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.ConfigServiceV2GrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = ConfigServiceV2Client(transport=transport) - assert client.transport is transport - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. 
- transport = transports.ConfigServiceV2GrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.ConfigServiceV2GrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - -@pytest.mark.parametrize("transport_class", [ - transports.ConfigServiceV2GrpcTransport, - transports.ConfigServiceV2GrpcAsyncIOTransport, - transports.ConfigServiceV2RestTransport, -]) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "rest", -]) -def test_transport_kind(transport_name): - transport = ConfigServiceV2Client.get_transport_class(transport_name)( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert transport.kind == transport_name - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.ConfigServiceV2GrpcTransport, - ) - -def test_config_service_v2_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.ConfigServiceV2Transport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json" - ) - - -def test_config_service_v2_base_transport(): - # Instantiate the base transport. - with mock.patch('google.cloud.logging_v2.services.config_service_v2.transports.ConfigServiceV2Transport.__init__') as Transport: - Transport.return_value = None - transport = transports.ConfigServiceV2Transport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. 
- methods = ( - 'list_buckets', - 'get_bucket', - 'create_bucket', - 'update_bucket', - 'delete_bucket', - 'undelete_bucket', - 'list_views', - 'get_view', - 'create_view', - 'update_view', - 'delete_view', - 'list_sinks', - 'get_sink', - 'create_sink', - 'update_sink', - 'delete_sink', - 'list_exclusions', - 'get_exclusion', - 'create_exclusion', - 'update_exclusion', - 'delete_exclusion', - 'get_cmek_settings', - 'update_cmek_settings', - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - with pytest.raises(NotImplementedError): - transport.close() - - # Catch all for all remaining methods and properties - remainder = [ - 'kind', - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() - - -def test_config_service_v2_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.logging_v2.services.config_service_v2.transports.ConfigServiceV2Transport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.ConfigServiceV2Transport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/cloud-platform.read-only', - 'https://www.googleapis.com/auth/logging.admin', - 'https://www.googleapis.com/auth/logging.read', -), - quota_project_id="octopus", - ) - - -def test_config_service_v2_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.logging_v2.services.config_service_v2.transports.ConfigServiceV2Transport._prep_wrapped_messages') as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.ConfigServiceV2Transport() - adc.assert_called_once() - - -def test_config_service_v2_auth_adc(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - ConfigServiceV2Client() - adc.assert_called_once_with( - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/cloud-platform.read-only', - 'https://www.googleapis.com/auth/logging.admin', - 'https://www.googleapis.com/auth/logging.read', -), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.ConfigServiceV2GrpcTransport, - transports.ConfigServiceV2GrpcAsyncIOTransport, - ], -) -def test_config_service_v2_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
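The ADC tests above show that user scopes and the library's default_scopes are both forwarded to google.auth.default, which applies user scopes when given and falls back to the defaults otherwise. A sketch of that handoff:

from unittest import mock

import google.auth
from google.auth import credentials as ga_credentials
from google.cloud.logging_v2.services.config_service_v2 import transports

with mock.patch.object(google.auth, "default", autospec=True) as adc:
    adc.return_value = (ga_credentials.AnonymousCredentials(), None)
    transports.ConfigServiceV2GrpcTransport(scopes=["1", "2"])
    _, kwargs = adc.call_args
    assert kwargs["scopes"] == ["1", "2"]
    assert "https://www.googleapis.com/auth/logging.read" in kwargs["default_scopes"]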
- with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=( 'https://www.googleapis.com/auth/cloud-platform', 'https://www.googleapis.com/auth/cloud-platform.read-only', 'https://www.googleapis.com/auth/logging.admin', 'https://www.googleapis.com/auth/logging.read',), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.ConfigServiceV2GrpcTransport, - transports.ConfigServiceV2GrpcAsyncIOTransport, - transports.ConfigServiceV2RestTransport, - ], -) -def test_config_service_v2_transport_auth_gdch_credentials(transport_class): - host = 'https://language.com' - api_audience_tests = [None, 'https://language2.com'] - api_audience_expect = [host, 'https://language2.com'] - for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) - adc.return_value = (gdch_mock, None) - transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with( - e - ) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.ConfigServiceV2GrpcTransport, grpc_helpers), - (transports.ConfigServiceV2GrpcAsyncIOTransport, grpc_helpers_async) - ], -) -def test_config_service_v2_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class( - quota_project_id="octopus", - scopes=["1", "2"] - ) - - create_channel.assert_called_with( - "logging.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/cloud-platform.read-only', - 'https://www.googleapis.com/auth/logging.admin', - 'https://www.googleapis.com/auth/logging.read', -), - scopes=["1", "2"], - default_host="logging.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("transport_class", [transports.ConfigServiceV2GrpcTransport, transports.ConfigServiceV2GrpcAsyncIOTransport]) -def test_config_service_v2_grpc_transport_client_cert_source_for_mtls( - transport_class -): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. 
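Two ways to configure client-side mTLS on the gRPC transports, in the precedence order the tests below verify: pass ready-made ssl_channel_credentials, or pass client_cert_source_for_mtls, a callback returning (cert_bytes, key_bytes) from which grpc.ssl_channel_credentials is built only when needed. The client_cert_source_callback fixture these tests use is defined earlier in the file; judging by the asserted bytes, it looks like:

def client_cert_source_callback():
    return b"cert bytes", b"key bytes"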
- with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. - with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): - with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: - transport_class( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - expected_cert, expected_key = client_cert_source_callback() - mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, - private_key=expected_key - ) - -def test_config_service_v2_http_transport_client_cert_source_for_mtls(): - cred = ga_credentials.AnonymousCredentials() - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: - transports.ConfigServiceV2RestTransport ( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) - - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", -]) -def test_config_service_v2_host_no_port(transport_name): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='logging.googleapis.com'), - transport=transport_name, - ) - assert client.transport._host == ( - 'logging.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://logging.googleapis.com' - ) - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", -]) -def test_config_service_v2_host_with_port(transport_name): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='logging.googleapis.com:8000'), - transport=transport_name, - ) - assert client.transport._host == ( - 'logging.googleapis.com:8000' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://logging.googleapis.com:8000' - ) - -@pytest.mark.parametrize("transport_name", [ - "rest", -]) -def test_config_service_v2_client_transport_session_collision(transport_name): - creds1 = ga_credentials.AnonymousCredentials() - creds2 = ga_credentials.AnonymousCredentials() - client1 = ConfigServiceV2Client( - credentials=creds1, - transport=transport_name, - ) - client2 = ConfigServiceV2Client( - credentials=creds2, - transport=transport_name, - ) - session1 = client1.transport.list_buckets._session - session2 = client2.transport.list_buckets._session - assert session1 != session2 - session1 = client1.transport.get_bucket._session - session2 = client2.transport.get_bucket._session - assert session1 != session2 - session1 = client1.transport.create_bucket._session - session2 = client2.transport.create_bucket._session - assert session1 != session2 - session1 = client1.transport.update_bucket._session - session2 = client2.transport.update_bucket._session - assert session1 != session2 - session1 = 
client1.transport.delete_bucket._session - session2 = client2.transport.delete_bucket._session - assert session1 != session2 - session1 = client1.transport.undelete_bucket._session - session2 = client2.transport.undelete_bucket._session - assert session1 != session2 - session1 = client1.transport.list_views._session - session2 = client2.transport.list_views._session - assert session1 != session2 - session1 = client1.transport.get_view._session - session2 = client2.transport.get_view._session - assert session1 != session2 - session1 = client1.transport.create_view._session - session2 = client2.transport.create_view._session - assert session1 != session2 - session1 = client1.transport.update_view._session - session2 = client2.transport.update_view._session - assert session1 != session2 - session1 = client1.transport.delete_view._session - session2 = client2.transport.delete_view._session - assert session1 != session2 - session1 = client1.transport.list_sinks._session - session2 = client2.transport.list_sinks._session - assert session1 != session2 - session1 = client1.transport.get_sink._session - session2 = client2.transport.get_sink._session - assert session1 != session2 - session1 = client1.transport.create_sink._session - session2 = client2.transport.create_sink._session - assert session1 != session2 - session1 = client1.transport.update_sink._session - session2 = client2.transport.update_sink._session - assert session1 != session2 - session1 = client1.transport.delete_sink._session - session2 = client2.transport.delete_sink._session - assert session1 != session2 - session1 = client1.transport.list_exclusions._session - session2 = client2.transport.list_exclusions._session - assert session1 != session2 - session1 = client1.transport.get_exclusion._session - session2 = client2.transport.get_exclusion._session - assert session1 != session2 - session1 = client1.transport.create_exclusion._session - session2 = client2.transport.create_exclusion._session - assert session1 != session2 - session1 = client1.transport.update_exclusion._session - session2 = client2.transport.update_exclusion._session - assert session1 != session2 - session1 = client1.transport.delete_exclusion._session - session2 = client2.transport.delete_exclusion._session - assert session1 != session2 - session1 = client1.transport.get_cmek_settings._session - session2 = client2.transport.get_cmek_settings._session - assert session1 != session2 - session1 = client1.transport.update_cmek_settings._session - session2 = client2.transport.update_cmek_settings._session - assert session1 != session2 -def test_config_service_v2_grpc_transport_channel(): - channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.ConfigServiceV2GrpcTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -def test_config_service_v2_grpc_asyncio_transport_channel(): - channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. 
- transport = transports.ConfigServiceV2GrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize("transport_class", [transports.ConfigServiceV2GrpcTransport, transports.ConfigServiceV2GrpcAsyncIOTransport]) -def test_config_service_v2_transport_channel_mtls_with_client_cert_source( - transport_class -): - with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - cred = ga_credentials.AnonymousCredentials() - with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (cred, None) - transport = transport_class( - host="squid.clam.whelk", - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - adc.assert_called_once() - - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - assert transport._ssl_channel_credentials == mock_ssl_cred - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. 
-@pytest.mark.parametrize("transport_class", [transports.ConfigServiceV2GrpcTransport, transports.ConfigServiceV2GrpcAsyncIOTransport]) -def test_config_service_v2_transport_channel_mtls_with_adc( - transport_class -): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_cmek_settings_path(): - project = "squid" - expected = "projects/{project}/cmekSettings".format(project=project, ) - actual = ConfigServiceV2Client.cmek_settings_path(project) - assert expected == actual - - -def test_parse_cmek_settings_path(): - expected = { - "project": "clam", - } - path = ConfigServiceV2Client.cmek_settings_path(**expected) - - # Check that the path construction is reversible. - actual = ConfigServiceV2Client.parse_cmek_settings_path(path) - assert expected == actual - -def test_log_bucket_path(): - project = "whelk" - location = "octopus" - bucket = "oyster" - expected = "projects/{project}/locations/{location}/buckets/{bucket}".format(project=project, location=location, bucket=bucket, ) - actual = ConfigServiceV2Client.log_bucket_path(project, location, bucket) - assert expected == actual - - -def test_parse_log_bucket_path(): - expected = { - "project": "nudibranch", - "location": "cuttlefish", - "bucket": "mussel", - } - path = ConfigServiceV2Client.log_bucket_path(**expected) - - # Check that the path construction is reversible. - actual = ConfigServiceV2Client.parse_log_bucket_path(path) - assert expected == actual + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() -def test_log_exclusion_path(): - project = "winkle" - exclusion = "nautilus" - expected = "projects/{project}/exclusions/{exclusion}".format(project=project, exclusion=exclusion, ) - actual = ConfigServiceV2Client.log_exclusion_path(project, exclusion) - assert expected == actual +def test_get_operation(transport: str = "grpc"): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) -def test_parse_log_exclusion_path(): - expected = { - "project": "scallop", - "exclusion": "abalone", - } - path = ConfigServiceV2Client.log_exclusion_path(**expected) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
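+    # A default-constructed GetOperationRequest serializes as an empty proto;
+    # the assertions below only verify that this exact request object reaches
+    # the mocked stub and that an Operation comes back.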
+ request = operations_pb2.GetOperationRequest() - # Check that the path construction is reversible. - actual = ConfigServiceV2Client.parse_log_exclusion_path(path) - assert expected == actual + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request -def test_log_sink_path(): - project = "squid" - sink = "clam" - expected = "projects/{project}/sinks/{sink}".format(project=project, sink=sink, ) - actual = ConfigServiceV2Client.log_sink_path(project, sink) - assert expected == actual + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc"): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() -def test_parse_log_sink_path(): - expected = { - "project": "whelk", - "sink": "octopus", - } - path = ConfigServiceV2Client.log_sink_path(**expected) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request - # Check that the path construction is reversible. - actual = ConfigServiceV2Client.parse_log_sink_path(path) - assert expected == actual + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) -def test_log_view_path(): - project = "oyster" - location = "nudibranch" - bucket = "cuttlefish" - view = "mussel" - expected = "projects/{project}/locations/{location}/buckets/{bucket}/views/{view}".format(project=project, location=location, bucket=bucket, view=view, ) - actual = ConfigServiceV2Client.log_view_path(project, location, bucket, view) - assert expected == actual +def test_get_operation_field_headers(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" -def test_parse_log_view_path(): - expected = { - "project": "winkle", - "location": "nautilus", - "bucket": "scallop", - "view": "abalone", - } - path = ConfigServiceV2Client.log_view_path(**expected) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() - # Check that the path construction is reversible. 
- actual = ConfigServiceV2Client.parse_log_view_path(path) - assert expected == actual + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request -def test_common_billing_account_path(): - billing_account = "squid" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - actual = ConfigServiceV2Client.common_billing_account_path(billing_account) - assert expected == actual + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "clam", - } - path = ConfigServiceV2Client.common_billing_account_path(**expected) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request - # Check that the path construction is reversible. - actual = ConfigServiceV2Client.parse_common_billing_account_path(path) - assert expected == actual + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -def test_common_folder_path(): - folder = "whelk" - expected = "folders/{folder}".format(folder=folder, ) - actual = ConfigServiceV2Client.common_folder_path(folder) - assert expected == actual +def test_get_operation_from_dict(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() -def test_parse_common_folder_path(): - expected = { - "folder": "octopus", - } - path = ConfigServiceV2Client.common_folder_path(**expected) - # Check that the path construction is reversible. 
- actual = ConfigServiceV2Client.parse_common_folder_path(path) - assert expected == actual +def test_list_operations(transport: str = "grpc"): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) -def test_common_organization_path(): - organization = "oyster" - expected = "organizations/{organization}".format(organization=organization, ) - actual = ConfigServiceV2Client.common_organization_path(organization) - assert expected == actual + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request -def test_parse_common_organization_path(): - expected = { - "organization": "nudibranch", - } - path = ConfigServiceV2Client.common_organization_path(**expected) + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc"): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) - # Check that the path construction is reversible. - actual = ConfigServiceV2Client.parse_common_organization_path(path) - assert expected == actual + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() -def test_common_project_path(): - project = "cuttlefish" - expected = "projects/{project}".format(project=project, ) - actual = ConfigServiceV2Client.common_project_path(project) - assert expected == actual + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) -def test_parse_common_project_path(): - expected = { - "project": "mussel", - } - path = ConfigServiceV2Client.common_project_path(**expected) +def test_list_operations_field_headers(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) - # Check that the path construction is reversible. - actual = ConfigServiceV2Client.parse_common_project_path(path) - assert expected == actual + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
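+    # The transport derives the routing header from `name`, so the test later
+    # looks for the ("x-goog-request-params", "name=locations") pair in the
+    # metadata attached to the mocked call.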
+ request = operations_pb2.ListOperationsRequest() + request.name = "locations" -def test_common_location_path(): - project = "winkle" - location = "nautilus" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) - actual = ConfigServiceV2Client.common_location_path(project, location) - assert expected == actual + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request -def test_parse_common_location_path(): - expected = { - "project": "scallop", - "location": "abalone", - } - path = ConfigServiceV2Client.common_location_path(**expected) + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) - # Check that the path construction is reversible. - actual = ConfigServiceV2Client.parse_common_location_path(path) - assert expected == actual + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - with mock.patch.object(transports.ConfigServiceV2Transport, '_prep_wrapped_messages') as prep: - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) +def test_list_operations_from_dict(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. 
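+    # The from_dict variants only verify that a plain dict request is accepted
+    # and coerced into the proto type, hence the bare call.assert_called()
+    # instead of argument-level assertions.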
+ call.return_value = operations_pb2.ListOperationsResponse() - with mock.patch.object(transports.ConfigServiceV2Transport, '_prep_wrapped_messages') as prep: - transport_class = ConfigServiceV2Client.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, + response = client.list_operations( + request={ + "name": "locations", + } ) - prep.assert_called_once_with(client_info) - + call.assert_called() @pytest.mark.asyncio -async def test_transport_close_async(): +async def test_list_operations_from_dict_async(): client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", ) - with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: - async with client: - close.assert_not_called() - close.assert_called_once() + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() def test_transport_close(): transports = { - "rest": "_session", "grpc": "_grpc_channel", } @@ -11912,7 +9212,6 @@ def test_transport_close(): def test_client_ctx(): transports = [ - 'rest', 'grpc', ] for transport in transports: diff --git a/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py b/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py index e477e51adc..4005872a75 100755 --- a/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py +++ b/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py @@ -23,17 +23,10 @@ import grpc from grpc.experimental import aio -from collections.abc import Iterable -from google.protobuf import json_format -import json import math import pytest from proto.marshal.rules.dates import DurationRule, TimestampRule from proto.marshal.rules import wrappers -from requests import Response -from requests import Request, PreparedRequest -from requests.sessions import Session -from google.protobuf import json_format from google.api import monitored_resource_pb2 # type: ignore from google.api_core import client_options @@ -52,6 +45,7 @@ from google.cloud.logging_v2.types import logging from google.logging.type import http_request_pb2 # type: ignore from google.logging.type import log_severity_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account from google.protobuf import any_pb2 # type: ignore from google.protobuf import duration_pb2 # type: ignore @@ -89,7 +83,6 @@ def test__get_default_mtls_endpoint(): @pytest.mark.parametrize("client_class,transport_name", [ (LoggingServiceV2Client, "grpc"), (LoggingServiceV2AsyncClient, "grpc_asyncio"), - (LoggingServiceV2Client, "rest"), ]) def test_logging_service_v2_client_from_service_account_info(client_class, transport_name): creds = ga_credentials.AnonymousCredentials() @@ -102,16 +95,12 @@ def test_logging_service_v2_client_from_service_account_info(client_class, trans assert client.transport._host == ( 'logging.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://logging.googleapis.com' ) 
@pytest.mark.parametrize("transport_class,transport_name", [ (transports.LoggingServiceV2GrpcTransport, "grpc"), (transports.LoggingServiceV2GrpcAsyncIOTransport, "grpc_asyncio"), - (transports.LoggingServiceV2RestTransport, "rest"), ]) def test_logging_service_v2_client_service_account_always_use_jwt(transport_class, transport_name): with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: @@ -128,7 +117,6 @@ def test_logging_service_v2_client_service_account_always_use_jwt(transport_clas @pytest.mark.parametrize("client_class,transport_name", [ (LoggingServiceV2Client, "grpc"), (LoggingServiceV2AsyncClient, "grpc_asyncio"), - (LoggingServiceV2Client, "rest"), ]) def test_logging_service_v2_client_from_service_account_file(client_class, transport_name): creds = ga_credentials.AnonymousCredentials() @@ -144,9 +132,6 @@ def test_logging_service_v2_client_from_service_account_file(client_class, trans assert client.transport._host == ( 'logging.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://logging.googleapis.com' ) @@ -154,7 +139,6 @@ def test_logging_service_v2_client_get_transport_class(): transport = LoggingServiceV2Client.get_transport_class() available_transports = [ transports.LoggingServiceV2GrpcTransport, - transports.LoggingServiceV2RestTransport, ] assert transport in available_transports @@ -165,7 +149,6 @@ def test_logging_service_v2_client_get_transport_class(): @pytest.mark.parametrize("client_class,transport_class,transport_name", [ (LoggingServiceV2Client, transports.LoggingServiceV2GrpcTransport, "grpc"), (LoggingServiceV2AsyncClient, transports.LoggingServiceV2GrpcAsyncIOTransport, "grpc_asyncio"), - (LoggingServiceV2Client, transports.LoggingServiceV2RestTransport, "rest"), ]) @mock.patch.object(LoggingServiceV2Client, "DEFAULT_ENDPOINT", modify_default_endpoint(LoggingServiceV2Client)) @mock.patch.object(LoggingServiceV2AsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(LoggingServiceV2AsyncClient)) @@ -285,8 +268,6 @@ def test_logging_service_v2_client_client_options(client_class, transport_class, (LoggingServiceV2AsyncClient, transports.LoggingServiceV2GrpcAsyncIOTransport, "grpc_asyncio", "true"), (LoggingServiceV2Client, transports.LoggingServiceV2GrpcTransport, "grpc", "false"), (LoggingServiceV2AsyncClient, transports.LoggingServiceV2GrpcAsyncIOTransport, "grpc_asyncio", "false"), - (LoggingServiceV2Client, transports.LoggingServiceV2RestTransport, "rest", "true"), - (LoggingServiceV2Client, transports.LoggingServiceV2RestTransport, "rest", "false"), ]) @mock.patch.object(LoggingServiceV2Client, "DEFAULT_ENDPOINT", modify_default_endpoint(LoggingServiceV2Client)) @mock.patch.object(LoggingServiceV2AsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(LoggingServiceV2AsyncClient)) @@ -424,7 +405,6 @@ def test_logging_service_v2_client_get_mtls_endpoint_and_cert_source(client_clas @pytest.mark.parametrize("client_class,transport_class,transport_name", [ (LoggingServiceV2Client, transports.LoggingServiceV2GrpcTransport, "grpc"), (LoggingServiceV2AsyncClient, transports.LoggingServiceV2GrpcAsyncIOTransport, "grpc_asyncio"), - (LoggingServiceV2Client, transports.LoggingServiceV2RestTransport, "rest"), ]) def test_logging_service_v2_client_client_options_scopes(client_class, transport_class, transport_name): # Check the case scopes are provided. 
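With the REST transport dropped from these goldens, every transport parametrization collapses to the two gRPC variants and the endpoint assertions lose their `https://` branch. A condensed sketch of the resulting host check (the test name is illustrative; the client and credential fixtures mirror the golden tests above):

@pytest.mark.parametrize("transport_name", ["grpc", "grpc_asyncio"])
def test_host_resolves_to_grpc_endpoint(transport_name):
    # Both gRPC flavors should resolve the default endpoint with the
    # canonical :443 port; there is no REST https:// branch left.
    client = LoggingServiceV2Client(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport_name,
    )
    assert client.transport._host == "logging.googleapis.com:443"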
@@ -449,7 +429,6 @@ def test_logging_service_v2_client_client_options_scopes(client_class, transport @pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ (LoggingServiceV2Client, transports.LoggingServiceV2GrpcTransport, "grpc", grpc_helpers), (LoggingServiceV2AsyncClient, transports.LoggingServiceV2GrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), - (LoggingServiceV2Client, transports.LoggingServiceV2RestTransport, "rest", None), ]) def test_logging_service_v2_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): # Check the case credentials file is provided. @@ -2119,1379 +2098,176 @@ async def test_tail_log_entries_async_from_dict(): await test_tail_log_entries_async(request_type=dict) -@pytest.mark.parametrize("request_type", [ - logging.DeleteLogRequest, - dict, -]) -def test_delete_log_rest(request_type): - client = LoggingServiceV2Client( +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.LoggingServiceV2GrpcTransport( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - - # send a request that will satisfy transcoding - request_init = {'log_name': 'projects/sample1/logs/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.delete_log(request) - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_log_rest_required_fields(request_type=logging.DeleteLogRequest): - transport_class = transports.LoggingServiceV2RestTransport - - request_init = {} - request_init["log_name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_log._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["logName"] = 'log_name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_log._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "logName" in jsonified_request - assert jsonified_request["logName"] == 'log_name_value' - - client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "delete", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.delete_log(request) - - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_delete_log_rest_unset_required_fields(): - transport = transports.LoggingServiceV2RestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.delete_log._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("logName", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_log_rest_interceptors(null_interceptor): - transport = transports.LoggingServiceV2RestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.LoggingServiceV2RestInterceptor(), + with pytest.raises(ValueError): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - client = LoggingServiceV2Client(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.LoggingServiceV2RestInterceptor, "pre_delete_log") as pre: - pre.assert_not_called() - pb_message = logging.DeleteLogRequest.pb(logging.DeleteLogRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - - request = logging.DeleteLogRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - - client.delete_log(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - - -def test_delete_log_rest_bad_request(transport: str = 'rest', request_type=logging.DeleteLogRequest): - client = LoggingServiceV2Client( + + # It is an error to provide a credentials file and a transport instance. + transport = transports.LoggingServiceV2GrpcTransport( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) + with pytest.raises(ValueError): + client = LoggingServiceV2Client( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) - # send a request that will satisfy transcoding - request_init = {'log_name': 'projects/sample1/logs/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_log(request) - - -def test_delete_log_rest_flattened(): - client = LoggingServiceV2Client( + # It is an error to provide an api_key and a transport instance. + transport = transports.LoggingServiceV2GrpcTransport( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = {'log_name': 'projects/sample1/logs/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - log_name='log_name_value', + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = LoggingServiceV2Client( + client_options=options, + transport=transport, ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.delete_log(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{log_name=projects/*/logs/*}" % client.transport._host, args[1]) + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = LoggingServiceV2Client( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) -def test_delete_log_rest_flattened_error(transport: str = 'rest'): - client = LoggingServiceV2Client( + # It is an error to provide scopes and a transport instance. + transport = transports.LoggingServiceV2GrpcTransport( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. with pytest.raises(ValueError): - client.delete_log( - logging.DeleteLogRequest(), - log_name='log_name_value', + client = LoggingServiceV2Client( + client_options={"scopes": ["1", "2"]}, + transport=transport, ) -def test_delete_log_rest_error(): - client = LoggingServiceV2Client( +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.LoggingServiceV2GrpcTransport( credentials=ga_credentials.AnonymousCredentials(), - transport='rest' ) + client = LoggingServiceV2Client(transport=transport) + assert client.transport is transport - -@pytest.mark.parametrize("request_type", [ - logging.WriteLogEntriesRequest, - dict, -]) -def test_write_log_entries_rest(request_type): - client = LoggingServiceV2Client( +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. 
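+    # Construct the transport directly and confirm it exposes the underlying
+    # gRPC channel; a truthy channel object is all these tests require.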
+ transport = transports.LoggingServiceV2GrpcTransport( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) + channel = transport.grpc_channel + assert channel - # send a request that will satisfy transcoding - request_init = {} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = logging.WriteLogEntriesResponse( - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = logging.WriteLogEntriesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.write_log_entries(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, logging.WriteLogEntriesResponse) - - -def test_write_log_entries_rest_required_fields(request_type=logging.WriteLogEntriesRequest): - transport_class = transports.LoggingServiceV2RestTransport - - request_init = {} - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).write_log_entries._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).write_log_entries._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - - client = LoggingServiceV2Client( + transport = transports.LoggingServiceV2GrpcAsyncIOTransport( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = logging.WriteLogEntriesResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = logging.WriteLogEntriesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.write_log_entries(request) - - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_write_log_entries_rest_unset_required_fields(): - transport = transports.LoggingServiceV2RestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.write_log_entries._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("entries", ))) + ) + channel = transport.grpc_channel + assert channel +@pytest.mark.parametrize("transport_class", [ + transports.LoggingServiceV2GrpcTransport, + transports.LoggingServiceV2GrpcAsyncIOTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_write_log_entries_rest_interceptors(null_interceptor): - transport = transports.LoggingServiceV2RestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.LoggingServiceV2RestInterceptor(), - ) - client = LoggingServiceV2Client(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.LoggingServiceV2RestInterceptor, "post_write_log_entries") as post, \ - mock.patch.object(transports.LoggingServiceV2RestInterceptor, "pre_write_log_entries") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = logging.WriteLogEntriesRequest.pb(logging.WriteLogEntriesRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = logging.WriteLogEntriesResponse.to_json(logging.WriteLogEntriesResponse()) - - request = logging.WriteLogEntriesRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = logging.WriteLogEntriesResponse() - - client.write_log_entries(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_write_log_entries_rest_bad_request(transport: str = 'rest', request_type=logging.WriteLogEntriesRequest): - client = LoggingServiceV2Client( +@pytest.mark.parametrize("transport_name", [ + "grpc", +]) +def test_transport_kind(transport_name): + transport = LoggingServiceV2Client.get_transport_class(transport_name)( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) + assert transport.kind == transport_name - # send a request that will satisfy transcoding - request_init = {} 
- request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.write_log_entries(request) - - -def test_write_log_entries_rest_flattened(): +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. client = LoggingServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + ) + assert isinstance( + client.transport, + transports.LoggingServiceV2GrpcTransport, ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = logging.WriteLogEntriesResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {} - - # get truthy value for each flattened field - mock_args = dict( - log_name='log_name_value', - resource=monitored_resource_pb2.MonitoredResource(type='type_value'), - labels={'key_value': 'value_value'}, - entries=[log_entry.LogEntry(log_name='log_name_value')], +def test_logging_service_v2_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.LoggingServiceV2Transport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = logging.WriteLogEntriesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - client.write_log_entries(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/entries:write" % client.transport._host, args[1]) - - -def test_write_log_entries_rest_flattened_error(transport: str = 'rest'): - client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.write_log_entries( - logging.WriteLogEntriesRequest(), - log_name='log_name_value', - resource=monitored_resource_pb2.MonitoredResource(type='type_value'), - labels={'key_value': 'value_value'}, - entries=[log_entry.LogEntry(log_name='log_name_value')], +def test_logging_service_v2_base_transport(): + # Instantiate the base transport. 
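+    # The abstract base transport is patched so it can be constructed on its
+    # own; every RPC listed below, including the new operations mixins, is
+    # expected to raise NotImplementedError until a concrete transport
+    # overrides it.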
+ with mock.patch('google.cloud.logging_v2.services.logging_service_v2.transports.LoggingServiceV2Transport.__init__') as Transport: + Transport.return_value = None + transport = transports.LoggingServiceV2Transport( + credentials=ga_credentials.AnonymousCredentials(), ) - -def test_write_log_entries_rest_error(): - client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + 'delete_log', + 'write_log_entries', + 'list_log_entries', + 'list_monitored_resource_descriptors', + 'list_logs', + 'tail_log_entries', + 'get_operation', + 'cancel_operation', + 'list_operations', ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + with pytest.raises(NotImplementedError): + transport.close() -@pytest.mark.parametrize("request_type", [ - logging.ListLogEntriesRequest, - dict, -]) -def test_list_log_entries_rest(request_type): - client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() - # send a request that will satisfy transcoding - request_init = {} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = logging.ListLogEntriesResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = logging.ListLogEntriesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.list_log_entries(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListLogEntriesPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_log_entries_rest_required_fields(request_type=logging.ListLogEntriesRequest): - transport_class = transports.LoggingServiceV2RestTransport - - request_init = {} - request_init["resource_names"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_log_entries._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["resourceNames"] = 'resource_names_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_log_entries._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "resourceNames" in jsonified_request - assert jsonified_request["resourceNames"] == 'resource_names_value' - - client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = logging.ListLogEntriesResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = logging.ListLogEntriesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.list_log_entries(request) - - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_list_log_entries_rest_unset_required_fields(): - transport = transports.LoggingServiceV2RestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.list_log_entries._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("resourceNames", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_log_entries_rest_interceptors(null_interceptor): - transport = transports.LoggingServiceV2RestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.LoggingServiceV2RestInterceptor(), - ) - client = LoggingServiceV2Client(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.LoggingServiceV2RestInterceptor, "post_list_log_entries") as post, \ - mock.patch.object(transports.LoggingServiceV2RestInterceptor, "pre_list_log_entries") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = logging.ListLogEntriesRequest.pb(logging.ListLogEntriesRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = logging.ListLogEntriesResponse.to_json(logging.ListLogEntriesResponse()) - - request = logging.ListLogEntriesRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = logging.ListLogEntriesResponse() - - client.list_log_entries(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_list_log_entries_rest_bad_request(transport: str = 'rest', request_type=logging.ListLogEntriesRequest): - client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_log_entries(request) - - -def test_list_log_entries_rest_flattened(): - client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = logging.ListLogEntriesResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {} - - # get truthy value for each flattened field - mock_args = dict( - resource_names=['resource_names_value'], - filter='filter_value', - order_by='order_by_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = logging.ListLogEntriesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.list_log_entries(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/entries:list" % client.transport._host, args[1]) - - -def test_list_log_entries_rest_flattened_error(transport: str = 'rest'): - client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_log_entries( - logging.ListLogEntriesRequest(), - resource_names=['resource_names_value'], - filter='filter_value', - order_by='order_by_value', - ) - - -def test_list_log_entries_rest_pager(transport: str = 'rest'): - client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - logging.ListLogEntriesResponse( - entries=[ - log_entry.LogEntry(), - log_entry.LogEntry(), - log_entry.LogEntry(), - ], - next_page_token='abc', - ), - logging.ListLogEntriesResponse( - entries=[], - next_page_token='def', - ), - logging.ListLogEntriesResponse( - entries=[ - log_entry.LogEntry(), - ], - next_page_token='ghi', - ), - logging.ListLogEntriesResponse( - entries=[ - log_entry.LogEntry(), - log_entry.LogEntry(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(logging.ListLogEntriesResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {} - - pager = client.list_log_entries(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, log_entry.LogEntry) - for i in results) - - pages = list(client.list_log_entries(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize("request_type", [ - logging.ListMonitoredResourceDescriptorsRequest, - dict, -]) -def test_list_monitored_resource_descriptors_rest(request_type): - client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = logging.ListMonitoredResourceDescriptorsResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = logging.ListMonitoredResourceDescriptorsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.list_monitored_resource_descriptors(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListMonitoredResourceDescriptorsPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_monitored_resource_descriptors_rest_interceptors(null_interceptor): - transport = transports.LoggingServiceV2RestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.LoggingServiceV2RestInterceptor(), - ) - client = LoggingServiceV2Client(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.LoggingServiceV2RestInterceptor, "post_list_monitored_resource_descriptors") as post, \ - mock.patch.object(transports.LoggingServiceV2RestInterceptor, "pre_list_monitored_resource_descriptors") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = logging.ListMonitoredResourceDescriptorsRequest.pb(logging.ListMonitoredResourceDescriptorsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = logging.ListMonitoredResourceDescriptorsResponse.to_json(logging.ListMonitoredResourceDescriptorsResponse()) - - request = logging.ListMonitoredResourceDescriptorsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = logging.ListMonitoredResourceDescriptorsResponse() - - client.list_monitored_resource_descriptors(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_list_monitored_resource_descriptors_rest_bad_request(transport: str = 'rest', request_type=logging.ListMonitoredResourceDescriptorsRequest): - client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_monitored_resource_descriptors(request) - - -def test_list_monitored_resource_descriptors_rest_pager(transport: str = 'rest'): - client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - logging.ListMonitoredResourceDescriptorsResponse( - resource_descriptors=[ - monitored_resource_pb2.MonitoredResourceDescriptor(), - monitored_resource_pb2.MonitoredResourceDescriptor(), - monitored_resource_pb2.MonitoredResourceDescriptor(), - ], - next_page_token='abc', - ), - logging.ListMonitoredResourceDescriptorsResponse( - resource_descriptors=[], - next_page_token='def', - ), - logging.ListMonitoredResourceDescriptorsResponse( - resource_descriptors=[ - monitored_resource_pb2.MonitoredResourceDescriptor(), - ], - next_page_token='ghi', - ), - logging.ListMonitoredResourceDescriptorsResponse( - resource_descriptors=[ - monitored_resource_pb2.MonitoredResourceDescriptor(), - monitored_resource_pb2.MonitoredResourceDescriptor(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(logging.ListMonitoredResourceDescriptorsResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {} - - pager = client.list_monitored_resource_descriptors(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, monitored_resource_pb2.MonitoredResourceDescriptor) - for i in results) - - pages = list(client.list_monitored_resource_descriptors(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize("request_type", [ - logging.ListLogsRequest, - dict, -]) -def test_list_logs_rest(request_type): - client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = logging.ListLogsResponse( - log_names=['log_names_value'], - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = logging.ListLogsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.list_logs(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListLogsPager) - assert response.log_names == ['log_names_value'] - assert response.next_page_token == 'next_page_token_value' - - -def test_list_logs_rest_required_fields(request_type=logging.ListLogsRequest): - transport_class = transports.LoggingServiceV2RestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_logs._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_logs._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("page_size", "page_token", "resource_names", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = logging.ListLogsResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
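- # transcode() would normally match the request against the method's
- # http_options and return a dict of the rough shape
- # {'uri': ..., 'method': ..., 'query_params': ..., 'body': ...};
- # returning a canned dict keeps the placeholder field values from
- # failing that matching.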
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = logging.ListLogsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.list_logs(request) - - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_list_logs_rest_unset_required_fields(): - transport = transports.LoggingServiceV2RestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.list_logs._get_unset_required_fields({}) - assert set(unset_fields) == (set(("pageSize", "pageToken", "resourceNames", )) & set(("parent", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_logs_rest_interceptors(null_interceptor): - transport = transports.LoggingServiceV2RestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.LoggingServiceV2RestInterceptor(), - ) - client = LoggingServiceV2Client(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.LoggingServiceV2RestInterceptor, "post_list_logs") as post, \ - mock.patch.object(transports.LoggingServiceV2RestInterceptor, "pre_list_logs") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = logging.ListLogsRequest.pb(logging.ListLogsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = logging.ListLogsResponse.to_json(logging.ListLogsResponse()) - - request = logging.ListLogsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = logging.ListLogsResponse() - - client.list_logs(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_list_logs_rest_bad_request(transport: str = 'rest', request_type=logging.ListLogsRequest): - client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_logs(request) - - -def test_list_logs_rest_flattened(): - client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = logging.ListLogsResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'sample1/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = logging.ListLogsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.list_logs(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{parent=*/*}/logs" % client.transport._host, args[1]) - - -def test_list_logs_rest_flattened_error(transport: str = 'rest'): - client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_logs( - logging.ListLogsRequest(), - parent='parent_value', - ) - - -def test_list_logs_rest_pager(transport: str = 'rest'): - client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - logging.ListLogsResponse( - log_names=[ - str(), - str(), - str(), - ], - next_page_token='abc', - ), - logging.ListLogsResponse( - log_names=[], - next_page_token='def', - ), - logging.ListLogsResponse( - log_names=[ - str(), - ], - next_page_token='ghi', - ), - logging.ListLogsResponse( - log_names=[ - str(), - str(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(logging.ListLogsResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'parent': 'sample1/sample2'} - - pager = client.list_logs(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, str) - for i in results) - - pages = list(client.list_logs(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -def test_tail_log_entries_rest_unimplemented(): - client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = logging.TailLogEntriesRequest() - requests = [request] - with pytest.raises(NotImplementedError): - client.tail_log_entries(requests) - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. 
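- # A transport instance is already bound to its own credentials, so
- # passing a separate credential source alongside it is ambiguous.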
- transport = transports.LoggingServiceV2GrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. - transport = transports.LoggingServiceV2GrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = LoggingServiceV2Client( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide an api_key and a transport instance. - transport = transports.LoggingServiceV2GrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = LoggingServiceV2Client( - client_options=options, - transport=transport, - ) - - # It is an error to provide an api_key and a credential. - options = mock.Mock() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = LoggingServiceV2Client( - client_options=options, - credentials=ga_credentials.AnonymousCredentials() - ) - - # It is an error to provide scopes and a transport instance. - transport = transports.LoggingServiceV2GrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = LoggingServiceV2Client( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.LoggingServiceV2GrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = LoggingServiceV2Client(transport=transport) - assert client.transport is transport - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.LoggingServiceV2GrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.LoggingServiceV2GrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - -@pytest.mark.parametrize("transport_class", [ - transports.LoggingServiceV2GrpcTransport, - transports.LoggingServiceV2GrpcAsyncIOTransport, - transports.LoggingServiceV2RestTransport, -]) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "rest", -]) -def test_transport_kind(transport_name): - transport = LoggingServiceV2Client.get_transport_class(transport_name)( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert transport.kind == transport_name - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. 
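- # With no transport argument the client falls back to gRPC, hence the
- # isinstance check below.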
- client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.LoggingServiceV2GrpcTransport, - ) - -def test_logging_service_v2_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.LoggingServiceV2Transport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json" - ) - - -def test_logging_service_v2_base_transport(): - # Instantiate the base transport. - with mock.patch('google.cloud.logging_v2.services.logging_service_v2.transports.LoggingServiceV2Transport.__init__') as Transport: - Transport.return_value = None - transport = transports.LoggingServiceV2Transport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. - methods = ( - 'delete_log', - 'write_log_entries', - 'list_log_entries', - 'list_monitored_resource_descriptors', - 'list_logs', - 'tail_log_entries', - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - with pytest.raises(NotImplementedError): - transport.close() - - # Catch all for all remaining methods and properties - remainder = [ - 'kind', - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() - - -def test_logging_service_v2_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.logging_v2.services.logging_service_v2.transports.LoggingServiceV2Transport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.LoggingServiceV2Transport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/cloud-platform.read-only', - 'https://www.googleapis.com/auth/logging.admin', - 'https://www.googleapis.com/auth/logging.read', - 'https://www.googleapis.com/auth/logging.write', -), - quota_project_id="octopus", +def test_logging_service_v2_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.logging_v2.services.logging_service_v2.transports.LoggingServiceV2Transport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.LoggingServiceV2Transport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/cloud-platform.read-only', + 'https://www.googleapis.com/auth/logging.admin', + 'https://www.googleapis.com/auth/logging.read', + 'https://www.googleapis.com/auth/logging.write', +), + quota_project_id="octopus", ) @@ -3547,7 +2323,6 @@ def 
test_logging_service_v2_transport_auth_adc(transport_class): [ transports.LoggingServiceV2GrpcTransport, transports.LoggingServiceV2GrpcAsyncIOTransport, - transports.LoggingServiceV2RestTransport, ], ) def test_logging_service_v2_transport_auth_gdch_credentials(transport_class): @@ -3648,20 +2423,10 @@ def test_logging_service_v2_grpc_transport_client_cert_source_for_mtls( private_key=expected_key ) -def test_logging_service_v2_http_transport_client_cert_source_for_mtls(): - cred = ga_credentials.AnonymousCredentials() - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: - transports.LoggingServiceV2RestTransport ( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) - @pytest.mark.parametrize("transport_name", [ "grpc", "grpc_asyncio", - "rest", ]) def test_logging_service_v2_host_no_port(transport_name): client = LoggingServiceV2Client( @@ -3671,14 +2436,11 @@ def test_logging_service_v2_host_no_port(transport_name): ) assert client.transport._host == ( 'logging.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://logging.googleapis.com' ) @pytest.mark.parametrize("transport_name", [ "grpc", "grpc_asyncio", - "rest", ]) def test_logging_service_v2_host_with_port(transport_name): client = LoggingServiceV2Client( @@ -3688,42 +2450,8 @@ def test_logging_service_v2_host_with_port(transport_name): ) assert client.transport._host == ( 'logging.googleapis.com:8000' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://logging.googleapis.com:8000' ) -@pytest.mark.parametrize("transport_name", [ - "rest", -]) -def test_logging_service_v2_client_transport_session_collision(transport_name): - creds1 = ga_credentials.AnonymousCredentials() - creds2 = ga_credentials.AnonymousCredentials() - client1 = LoggingServiceV2Client( - credentials=creds1, - transport=transport_name, - ) - client2 = LoggingServiceV2Client( - credentials=creds2, - transport=transport_name, - ) - session1 = client1.transport.delete_log._session - session2 = client2.transport.delete_log._session - assert session1 != session2 - session1 = client1.transport.write_log_entries._session - session2 = client2.transport.write_log_entries._session - assert session1 != session2 - session1 = client1.transport.list_log_entries._session - session2 = client2.transport.list_log_entries._session - assert session1 != session2 - session1 = client1.transport.list_monitored_resource_descriptors._session - session2 = client2.transport.list_monitored_resource_descriptors._session - assert session1 != session2 - session1 = client1.transport.list_logs._session - session2 = client2.transport.list_logs._session - assert session1 != session2 - session1 = client1.transport.tail_log_entries._session - session2 = client2.transport.tail_log_entries._session - assert session1 != session2 def test_logging_service_v2_grpc_transport_channel(): channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) @@ -3811,169 +2539,555 @@ def test_logging_service_v2_transport_channel_mtls_with_adc( grpc_create_channel.return_value = mock_grpc_channel mock_cred = mock.Mock() - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) + with pytest.warns(DeprecationWarning): + transport = 
transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_log_path(): + project = "squid" + log = "clam" + expected = "projects/{project}/logs/{log}".format(project=project, log=log, ) + actual = LoggingServiceV2Client.log_path(project, log) + assert expected == actual + + +def test_parse_log_path(): + expected = { + "project": "whelk", + "log": "octopus", + } + path = LoggingServiceV2Client.log_path(**expected) + + # Check that the path construction is reversible. + actual = LoggingServiceV2Client.parse_log_path(path) + assert expected == actual + +def test_common_billing_account_path(): + billing_account = "oyster" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = LoggingServiceV2Client.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "nudibranch", + } + path = LoggingServiceV2Client.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = LoggingServiceV2Client.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "cuttlefish" + expected = "folders/{folder}".format(folder=folder, ) + actual = LoggingServiceV2Client.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "mussel", + } + path = LoggingServiceV2Client.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = LoggingServiceV2Client.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "winkle" + expected = "organizations/{organization}".format(organization=organization, ) + actual = LoggingServiceV2Client.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nautilus", + } + path = LoggingServiceV2Client.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = LoggingServiceV2Client.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "scallop" + expected = "projects/{project}".format(project=project, ) + actual = LoggingServiceV2Client.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "abalone", + } + path = LoggingServiceV2Client.common_project_path(**expected) + + # Check that the path construction is reversible. 
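+ # parse_common_project_path() inverts the template, recovering the
+ # placeholder values as a dict that should equal the original input.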
+ actual = LoggingServiceV2Client.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "squid" + location = "clam" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = LoggingServiceV2Client.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "whelk", + "location": "octopus", + } + path = LoggingServiceV2Client.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = LoggingServiceV2Client.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.LoggingServiceV2Transport, '_prep_wrapped_messages') as prep: + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.LoggingServiceV2Transport, '_prep_wrapped_messages') as prep: + transport_class = LoggingServiceV2Client.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = LoggingServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_cancel_operation(transport: str = "grpc"): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc"): + client = LoggingServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. 
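+ # call.mock_calls records each stub invocation as (name, args, kwargs);
+ # args[0] is the request proto passed through to the stub.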
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + +def test_cancel_operation_field_headers(): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = LoggingServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_cancel_operation_from_dict(): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = LoggingServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
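+ # FakeUnaryUnaryCall wraps the value in an awaitable that mimics the
+ # call object a real async unary-unary stub would return.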
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_operation(transport: str = "grpc"): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request -def test_log_path(): - project = "squid" - log = "clam" - expected = "projects/{project}/logs/{log}".format(project=project, log=log, ) - actual = LoggingServiceV2Client.log_path(project, log) - assert expected == actual + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc"): + client = LoggingServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() -def test_parse_log_path(): - expected = { - "project": "whelk", - "log": "octopus", - } - path = LoggingServiceV2Client.log_path(**expected) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request - # Check that the path construction is reversible. - actual = LoggingServiceV2Client.parse_log_path(path) - assert expected == actual + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) -def test_common_billing_account_path(): - billing_account = "oyster" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - actual = LoggingServiceV2Client.common_billing_account_path(billing_account) - assert expected == actual +def test_get_operation_field_headers(): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
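+ # The client serializes such fields into the x-goog-request-params
+ # metadata entry, asserted below as ("x-goog-request-params", "name=locations").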
+ request = operations_pb2.GetOperationRequest() + request.name = "locations" -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "nudibranch", - } - path = LoggingServiceV2Client.common_billing_account_path(**expected) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() - # Check that the path construction is reversible. - actual = LoggingServiceV2Client.parse_common_billing_account_path(path) - assert expected == actual + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request -def test_common_folder_path(): - folder = "cuttlefish" - expected = "folders/{folder}".format(folder=folder, ) - actual = LoggingServiceV2Client.common_folder_path(folder) - assert expected == actual + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = LoggingServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" -def test_parse_common_folder_path(): - expected = { - "folder": "mussel", - } - path = LoggingServiceV2Client.common_folder_path(**expected) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request - # Check that the path construction is reversible. - actual = LoggingServiceV2Client.parse_common_folder_path(path) - assert expected == actual + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -def test_common_organization_path(): - organization = "winkle" - expected = "organizations/{organization}".format(organization=organization, ) - actual = LoggingServiceV2Client.common_organization_path(organization) - assert expected == actual +def test_get_operation_from_dict(): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = LoggingServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() -def test_parse_common_organization_path(): - expected = { - "organization": "nautilus", - } - path = LoggingServiceV2Client.common_organization_path(**expected) +def test_list_operations(transport: str = "grpc"): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) - # Check that the path construction is reversible. - actual = LoggingServiceV2Client.parse_common_organization_path(path) - assert expected == actual + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() -def test_common_project_path(): - project = "scallop" - expected = "projects/{project}".format(project=project, ) - actual = LoggingServiceV2Client.common_project_path(project) - assert expected == actual + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc"): + client = LoggingServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) -def test_parse_common_project_path(): - expected = { - "project": "abalone", - } - path = LoggingServiceV2Client.common_project_path(**expected) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() - # Check that the path construction is reversible. - actual = LoggingServiceV2Client.parse_common_project_path(path) - assert expected == actual + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request -def test_common_location_path(): - project = "squid" - location = "clam" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) - actual = LoggingServiceV2Client.common_location_path(project, location) - assert expected == actual + # Establish that the response is the type that we expect. 
+ assert isinstance(response, operations_pb2.ListOperationsResponse) +def test_list_operations_field_headers(): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) -def test_parse_common_location_path(): - expected = { - "project": "whelk", - "location": "octopus", - } - path = LoggingServiceV2Client.common_location_path(**expected) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" - # Check that the path construction is reversible. - actual = LoggingServiceV2Client.parse_common_location_path(path) - assert expected == actual + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = LoggingServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) - with mock.patch.object(transports.LoggingServiceV2Transport, '_prep_wrapped_messages') as prep: - client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" - with mock.patch.object(transports.LoggingServiceV2Transport, '_prep_wrapped_messages') as prep: - transport_class = LoggingServiceV2Client.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() ) - prep.assert_called_once_with(client_info) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_list_operations_from_dict(): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() @pytest.mark.asyncio -async def test_transport_close_async(): +async def test_list_operations_from_dict_async(): client = LoggingServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", ) - with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: - async with client: - close.assert_not_called() - close.assert_called_once() + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() def test_transport_close(): transports = { - "rest": "_session", "grpc": "_grpc_channel", } @@ -3989,7 +3103,6 @@ def test_transport_close(): def test_client_ctx(): transports = [ - 'rest', 'grpc', ] for transport in transports: diff --git a/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py b/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py index a28f2b1a25..e7a0798abc 100755 --- a/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py +++ b/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py @@ -23,17 +23,10 @@ import grpc from grpc.experimental import aio -from collections.abc import Iterable -from google.protobuf import json_format -import json import math import pytest from proto.marshal.rules.dates import DurationRule, TimestampRule from proto.marshal.rules import wrappers -from requests import Response -from requests import Request, PreparedRequest -from requests.sessions import Session -from google.protobuf import json_format from google.api import distribution_pb2 # type: ignore from google.api import label_pb2 # type: ignore @@ -52,6 +45,7 @@ from google.cloud.logging_v2.services.metrics_service_v2 import pagers from google.cloud.logging_v2.services.metrics_service_v2 import transports from google.cloud.logging_v2.types import logging_metrics +from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account from google.protobuf import duration_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore @@ -87,7 +81,6 @@ def test__get_default_mtls_endpoint(): @pytest.mark.parametrize("client_class,transport_name", [ (MetricsServiceV2Client, "grpc"), (MetricsServiceV2AsyncClient, "grpc_asyncio"), - (MetricsServiceV2Client, "rest"), ]) def test_metrics_service_v2_client_from_service_account_info(client_class, transport_name): creds = ga_credentials.AnonymousCredentials() @@ -100,16 +93,12 @@ def test_metrics_service_v2_client_from_service_account_info(client_class, trans assert client.transport._host == ( 'logging.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://logging.googleapis.com' ) @pytest.mark.parametrize("transport_class,transport_name", [ (transports.MetricsServiceV2GrpcTransport, "grpc"), (transports.MetricsServiceV2GrpcAsyncIOTransport, "grpc_asyncio"), - (transports.MetricsServiceV2RestTransport, "rest"), ]) def 
test_metrics_service_v2_client_service_account_always_use_jwt(transport_class, transport_name): with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: @@ -126,7 +115,6 @@ def test_metrics_service_v2_client_service_account_always_use_jwt(transport_clas @pytest.mark.parametrize("client_class,transport_name", [ (MetricsServiceV2Client, "grpc"), (MetricsServiceV2AsyncClient, "grpc_asyncio"), - (MetricsServiceV2Client, "rest"), ]) def test_metrics_service_v2_client_from_service_account_file(client_class, transport_name): creds = ga_credentials.AnonymousCredentials() @@ -142,9 +130,6 @@ def test_metrics_service_v2_client_from_service_account_file(client_class, trans assert client.transport._host == ( 'logging.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://logging.googleapis.com' ) @@ -152,7 +137,6 @@ def test_metrics_service_v2_client_get_transport_class(): transport = MetricsServiceV2Client.get_transport_class() available_transports = [ transports.MetricsServiceV2GrpcTransport, - transports.MetricsServiceV2RestTransport, ] assert transport in available_transports @@ -163,7 +147,6 @@ def test_metrics_service_v2_client_get_transport_class(): @pytest.mark.parametrize("client_class,transport_class,transport_name", [ (MetricsServiceV2Client, transports.MetricsServiceV2GrpcTransport, "grpc"), (MetricsServiceV2AsyncClient, transports.MetricsServiceV2GrpcAsyncIOTransport, "grpc_asyncio"), - (MetricsServiceV2Client, transports.MetricsServiceV2RestTransport, "rest"), ]) @mock.patch.object(MetricsServiceV2Client, "DEFAULT_ENDPOINT", modify_default_endpoint(MetricsServiceV2Client)) @mock.patch.object(MetricsServiceV2AsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(MetricsServiceV2AsyncClient)) @@ -283,8 +266,6 @@ def test_metrics_service_v2_client_client_options(client_class, transport_class, (MetricsServiceV2AsyncClient, transports.MetricsServiceV2GrpcAsyncIOTransport, "grpc_asyncio", "true"), (MetricsServiceV2Client, transports.MetricsServiceV2GrpcTransport, "grpc", "false"), (MetricsServiceV2AsyncClient, transports.MetricsServiceV2GrpcAsyncIOTransport, "grpc_asyncio", "false"), - (MetricsServiceV2Client, transports.MetricsServiceV2RestTransport, "rest", "true"), - (MetricsServiceV2Client, transports.MetricsServiceV2RestTransport, "rest", "false"), ]) @mock.patch.object(MetricsServiceV2Client, "DEFAULT_ENDPOINT", modify_default_endpoint(MetricsServiceV2Client)) @mock.patch.object(MetricsServiceV2AsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(MetricsServiceV2AsyncClient)) @@ -422,7 +403,6 @@ def test_metrics_service_v2_client_get_mtls_endpoint_and_cert_source(client_clas @pytest.mark.parametrize("client_class,transport_class,transport_name", [ (MetricsServiceV2Client, transports.MetricsServiceV2GrpcTransport, "grpc"), (MetricsServiceV2AsyncClient, transports.MetricsServiceV2GrpcAsyncIOTransport, "grpc_asyncio"), - (MetricsServiceV2Client, transports.MetricsServiceV2RestTransport, "rest"), ]) def test_metrics_service_v2_client_client_options_scopes(client_class, transport_class, transport_name): # Check the case scopes are provided. 
@@ -447,7 +427,6 @@ def test_metrics_service_v2_client_client_options_scopes(client_class, transport @pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ (MetricsServiceV2Client, transports.MetricsServiceV2GrpcTransport, "grpc", grpc_helpers), (MetricsServiceV2AsyncClient, transports.MetricsServiceV2GrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), - (MetricsServiceV2Client, transports.MetricsServiceV2RestTransport, "rest", None), ]) def test_metrics_service_v2_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): # Check the case credentials file is provided. @@ -996,6 +975,8 @@ def test_get_log_metric(request_type, transport: str = 'grpc'): name='name_value', description='description_value', filter='filter_value', + bucket_name='bucket_name_value', + disabled=True, value_extractor='value_extractor_value', version=logging_metrics.LogMetric.ApiVersion.V1, ) @@ -1011,6 +992,8 @@ def test_get_log_metric(request_type, transport: str = 'grpc'): assert response.name == 'name_value' assert response.description == 'description_value' assert response.filter == 'filter_value' + assert response.bucket_name == 'bucket_name_value' + assert response.disabled is True assert response.value_extractor == 'value_extractor_value' assert response.version == logging_metrics.LogMetric.ApiVersion.V1 @@ -1052,6 +1035,8 @@ async def test_get_log_metric_async(transport: str = 'grpc_asyncio', request_typ name='name_value', description='description_value', filter='filter_value', + bucket_name='bucket_name_value', + disabled=True, value_extractor='value_extractor_value', version=logging_metrics.LogMetric.ApiVersion.V1, )) @@ -1067,6 +1052,8 @@ async def test_get_log_metric_async(transport: str = 'grpc_asyncio', request_typ assert response.name == 'name_value' assert response.description == 'description_value' assert response.filter == 'filter_value' + assert response.bucket_name == 'bucket_name_value' + assert response.disabled is True assert response.value_extractor == 'value_extractor_value' assert response.version == logging_metrics.LogMetric.ApiVersion.V1 @@ -1244,6 +1231,8 @@ def test_create_log_metric(request_type, transport: str = 'grpc'): name='name_value', description='description_value', filter='filter_value', + bucket_name='bucket_name_value', + disabled=True, value_extractor='value_extractor_value', version=logging_metrics.LogMetric.ApiVersion.V1, ) @@ -1259,6 +1248,8 @@ def test_create_log_metric(request_type, transport: str = 'grpc'): assert response.name == 'name_value' assert response.description == 'description_value' assert response.filter == 'filter_value' + assert response.bucket_name == 'bucket_name_value' + assert response.disabled is True assert response.value_extractor == 'value_extractor_value' assert response.version == logging_metrics.LogMetric.ApiVersion.V1 @@ -1300,6 +1291,8 @@ async def test_create_log_metric_async(transport: str = 'grpc_asyncio', request_ name='name_value', description='description_value', filter='filter_value', + bucket_name='bucket_name_value', + disabled=True, value_extractor='value_extractor_value', version=logging_metrics.LogMetric.ApiVersion.V1, )) @@ -1315,6 +1308,8 @@ async def test_create_log_metric_async(transport: str = 'grpc_asyncio', request_ assert response.name == 'name_value' assert response.description == 'description_value' assert response.filter == 'filter_value' + assert response.bucket_name == 'bucket_name_value' + assert response.disabled is True assert 
response.value_extractor == 'value_extractor_value' assert response.version == logging_metrics.LogMetric.ApiVersion.V1 @@ -1502,6 +1497,8 @@ def test_update_log_metric(request_type, transport: str = 'grpc'): name='name_value', description='description_value', filter='filter_value', + bucket_name='bucket_name_value', + disabled=True, value_extractor='value_extractor_value', version=logging_metrics.LogMetric.ApiVersion.V1, ) @@ -1517,6 +1514,8 @@ def test_update_log_metric(request_type, transport: str = 'grpc'): assert response.name == 'name_value' assert response.description == 'description_value' assert response.filter == 'filter_value' + assert response.bucket_name == 'bucket_name_value' + assert response.disabled is True assert response.value_extractor == 'value_extractor_value' assert response.version == logging_metrics.LogMetric.ApiVersion.V1 @@ -1558,6 +1557,8 @@ async def test_update_log_metric_async(transport: str = 'grpc_asyncio', request_ name='name_value', description='description_value', filter='filter_value', + bucket_name='bucket_name_value', + disabled=True, value_extractor='value_extractor_value', version=logging_metrics.LogMetric.ApiVersion.V1, )) @@ -1573,6 +1574,8 @@ async def test_update_log_metric_async(transport: str = 'grpc_asyncio', request_ assert response.name == 'name_value' assert response.description == 'description_value' assert response.filter == 'filter_value' + assert response.bucket_name == 'bucket_name_value' + assert response.disabled is True assert response.value_extractor == 'value_extractor_value' assert response.version == logging_metrics.LogMetric.ApiVersion.V1 @@ -1963,1392 +1966,154 @@ async def test_delete_log_metric_flattened_error_async(): ) -@pytest.mark.parametrize("request_type", [ - logging_metrics.ListLogMetricsRequest, - dict, -]) -def test_list_log_metrics_rest(request_type): - client = MetricsServiceV2Client( +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.MetricsServiceV2GrpcTransport( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = logging_metrics.ListLogMetricsResponse( - next_page_token='next_page_token_value', + with pytest.raises(ValueError): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = logging_metrics.ListLogMetricsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.list_log_metrics(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListLogMetricsPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_log_metrics_rest_required_fields(request_type=logging_metrics.ListLogMetricsRequest): - transport_class = transports.MetricsServiceV2RestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_log_metrics._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_log_metrics._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("page_size", "page_token", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = logging_metrics.ListLogMetricsResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = logging_metrics.ListLogMetricsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.list_log_metrics(request) - - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_list_log_metrics_rest_unset_required_fields(): - transport = transports.MetricsServiceV2RestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.list_log_metrics._get_unset_required_fields({}) - assert set(unset_fields) == (set(("pageSize", "pageToken", )) & set(("parent", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_log_metrics_rest_interceptors(null_interceptor): - transport = transports.MetricsServiceV2RestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.MetricsServiceV2RestInterceptor(), - ) - client = MetricsServiceV2Client(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.MetricsServiceV2RestInterceptor, "post_list_log_metrics") as post, \ - mock.patch.object(transports.MetricsServiceV2RestInterceptor, "pre_list_log_metrics") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = logging_metrics.ListLogMetricsRequest.pb(logging_metrics.ListLogMetricsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = logging_metrics.ListLogMetricsResponse.to_json(logging_metrics.ListLogMetricsResponse()) - - request = logging_metrics.ListLogMetricsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = logging_metrics.ListLogMetricsResponse() - - client.list_log_metrics(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_list_log_metrics_rest_bad_request(transport: str = 'rest', request_type=logging_metrics.ListLogMetricsRequest): - client = MetricsServiceV2Client( + # It is an error to provide a credentials file and a transport instance. + transport = transports.MetricsServiceV2GrpcTransport( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) + with pytest.raises(ValueError): + client = MetricsServiceV2Client( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_log_metrics(request) - - -def test_list_log_metrics_rest_flattened(): - client = MetricsServiceV2Client( + # It is an error to provide an api_key and a transport instance. + transport = transports.MetricsServiceV2GrpcTransport( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = logging_metrics.ListLogMetricsResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = MetricsServiceV2Client( + client_options=options, + transport=transport, ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = logging_metrics.ListLogMetricsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.list_log_metrics(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{parent=projects/*}/metrics" % client.transport._host, args[1]) + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = MetricsServiceV2Client( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) -def test_list_log_metrics_rest_flattened_error(transport: str = 'rest'): - client = MetricsServiceV2Client( + # It is an error to provide scopes and a transport instance. + transport = transports.MetricsServiceV2GrpcTransport( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. with pytest.raises(ValueError): - client.list_log_metrics( - logging_metrics.ListLogMetricsRequest(), - parent='parent_value', + client = MetricsServiceV2Client( + client_options={"scopes": ["1", "2"]}, + transport=transport, ) -def test_list_log_metrics_rest_pager(transport: str = 'rest'): - client = MetricsServiceV2Client( +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.MetricsServiceV2GrpcTransport( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) + client = MetricsServiceV2Client(transport=transport) + assert client.transport is transport - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - logging_metrics.ListLogMetricsResponse( - metrics=[ - logging_metrics.LogMetric(), - logging_metrics.LogMetric(), - logging_metrics.LogMetric(), - ], - next_page_token='abc', - ), - logging_metrics.ListLogMetricsResponse( - metrics=[], - next_page_token='def', - ), - logging_metrics.ListLogMetricsResponse( - metrics=[ - logging_metrics.LogMetric(), - ], - next_page_token='ghi', - ), - logging_metrics.ListLogMetricsResponse( - metrics=[ - logging_metrics.LogMetric(), - logging_metrics.LogMetric(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(logging_metrics.ListLogMetricsResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'parent': 'projects/sample1'} - - pager = client.list_log_metrics(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, logging_metrics.LogMetric) - for i in results) - - pages = list(client.list_log_metrics(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize("request_type", [ - logging_metrics.GetLogMetricRequest, - dict, -]) -def test_get_log_metric_rest(request_type): - client = MetricsServiceV2Client( +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.MetricsServiceV2GrpcTransport( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) + channel = transport.grpc_channel + assert channel - # send a request that will satisfy transcoding - request_init = {'metric_name': 'projects/sample1/metrics/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = logging_metrics.LogMetric( - name='name_value', - description='description_value', - filter='filter_value', - value_extractor='value_extractor_value', - version=logging_metrics.LogMetric.ApiVersion.V1, - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = logging_metrics.LogMetric.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.get_log_metric(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, logging_metrics.LogMetric) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.filter == 'filter_value' - assert response.value_extractor == 'value_extractor_value' - assert response.version == logging_metrics.LogMetric.ApiVersion.V1 - - -def test_get_log_metric_rest_required_fields(request_type=logging_metrics.GetLogMetricRequest): - transport_class = transports.MetricsServiceV2RestTransport - - request_init = {} - request_init["metric_name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_log_metric._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["metricName"] = 'metric_name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_log_metric._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "metricName" in jsonified_request - assert jsonified_request["metricName"] == 'metric_name_value' - - client = MetricsServiceV2Client( + transport = transports.MetricsServiceV2GrpcAsyncIOTransport( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = logging_metrics.LogMetric() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = logging_metrics.LogMetric.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.get_log_metric(request) - - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_get_log_metric_rest_unset_required_fields(): - transport = transports.MetricsServiceV2RestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.get_log_metric._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("metricName", ))) + ) + channel = transport.grpc_channel + assert channel +@pytest.mark.parametrize("transport_class", [ + transports.MetricsServiceV2GrpcTransport, + transports.MetricsServiceV2GrpcAsyncIOTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_log_metric_rest_interceptors(null_interceptor): - transport = transports.MetricsServiceV2RestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.MetricsServiceV2RestInterceptor(), - ) - client = MetricsServiceV2Client(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.MetricsServiceV2RestInterceptor, "post_get_log_metric") as post, \ - mock.patch.object(transports.MetricsServiceV2RestInterceptor, "pre_get_log_metric") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = logging_metrics.GetLogMetricRequest.pb(logging_metrics.GetLogMetricRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = logging_metrics.LogMetric.to_json(logging_metrics.LogMetric()) - - request = logging_metrics.GetLogMetricRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = logging_metrics.LogMetric() - - client.get_log_metric(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_log_metric_rest_bad_request(transport: str = 'rest', request_type=logging_metrics.GetLogMetricRequest): - client = MetricsServiceV2Client( +@pytest.mark.parametrize("transport_name", [ + "grpc", +]) +def test_transport_kind(transport_name): + transport = MetricsServiceV2Client.get_transport_class(transport_name)( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) + assert transport.kind == transport_name - # send a request that will satisfy transcoding - request_init = {'metric_name': 'projects/sample1/metrics/sample2'} - request = 
request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_log_metric(request) - - -def test_get_log_metric_rest_flattened(): +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. client = MetricsServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + ) + assert isinstance( + client.transport, + transports.MetricsServiceV2GrpcTransport, ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = logging_metrics.LogMetric() +def test_metrics_service_v2_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.MetricsServiceV2Transport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) - # get arguments that satisfy an http rule for this method - sample_request = {'metric_name': 'projects/sample1/metrics/sample2'} - # get truthy value for each flattened field - mock_args = dict( - metric_name='metric_name_value', +def test_metrics_service_v2_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.logging_v2.services.metrics_service_v2.transports.MetricsServiceV2Transport.__init__') as Transport: + Transport.return_value = None + transport = transports.MetricsServiceV2Transport( + credentials=ga_credentials.AnonymousCredentials(), ) - mock_args.update(sample_request) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = logging_metrics.LogMetric.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + 'list_log_metrics', + 'get_log_metric', + 'create_log_metric', + 'update_log_metric', + 'delete_log_metric', + 'get_operation', + 'cancel_operation', + 'list_operations', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) - client.get_log_metric(**mock_args) + with pytest.raises(NotImplementedError): + transport.close() - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{metric_name=projects/*/metrics/*}" % client.transport._host, args[1]) - - -def test_get_log_metric_rest_flattened_error(transport: str = 'rest'): - client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_log_metric( - logging_metrics.GetLogMetricRequest(), - metric_name='metric_name_value', - ) - - -def test_get_log_metric_rest_error(): - client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - logging_metrics.CreateLogMetricRequest, - dict, -]) -def test_create_log_metric_rest(request_type): - client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1'} - request_init["metric"] = {'name': 'name_value', 'description': 'description_value', 'filter': 'filter_value', 'metric_descriptor': {'name': 'name_value', 'type': 'type_value', 'labels': [{'key': 'key_value', 'value_type': 1, 'description': 'description_value'}], 'metric_kind': 1, 'value_type': 1, 'unit': 'unit_value', 'description': 'description_value', 'display_name': 'display_name_value', 'metadata': {'launch_stage': 6, 'sample_period': {'seconds': 751, 'nanos': 543}, 'ingest_delay': {}}, 'launch_stage': 6, 'monitored_resource_types': ['monitored_resource_types_value1', 'monitored_resource_types_value2']}, 'value_extractor': 'value_extractor_value', 'label_extractors': {}, 'bucket_options': {'linear_buckets': {'num_finite_buckets': 1918, 'width': 0.544, 'offset': 0.647}, 'exponential_buckets': {'num_finite_buckets': 1918, 'growth_factor': 0.1401, 'scale': 0.52}, 'explicit_buckets': {'bounds': [0.652, 0.653]}}, 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'version': 1} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = logging_metrics.LogMetric( - name='name_value', - description='description_value', - filter='filter_value', - value_extractor='value_extractor_value', - version=logging_metrics.LogMetric.ApiVersion.V1, - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = logging_metrics.LogMetric.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.create_log_metric(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, logging_metrics.LogMetric) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.filter == 'filter_value' - assert response.value_extractor == 'value_extractor_value' - assert response.version == logging_metrics.LogMetric.ApiVersion.V1 - - -def test_create_log_metric_rest_required_fields(request_type=logging_metrics.CreateLogMetricRequest): - transport_class = transports.MetricsServiceV2RestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_log_metric._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_log_metric._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = logging_metrics.LogMetric() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = logging_metrics.LogMetric.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.create_log_metric(request) - - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_create_log_metric_rest_unset_required_fields(): - transport = transports.MetricsServiceV2RestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.create_log_metric._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("parent", "metric", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_log_metric_rest_interceptors(null_interceptor): - transport = transports.MetricsServiceV2RestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.MetricsServiceV2RestInterceptor(), - ) - client = MetricsServiceV2Client(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.MetricsServiceV2RestInterceptor, "post_create_log_metric") as post, \ - mock.patch.object(transports.MetricsServiceV2RestInterceptor, "pre_create_log_metric") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = logging_metrics.CreateLogMetricRequest.pb(logging_metrics.CreateLogMetricRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = logging_metrics.LogMetric.to_json(logging_metrics.LogMetric()) - - request = logging_metrics.CreateLogMetricRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = logging_metrics.LogMetric() - - client.create_log_metric(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_create_log_metric_rest_bad_request(transport: str = 'rest', request_type=logging_metrics.CreateLogMetricRequest): - client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1'} - request_init["metric"] = {'name': 'name_value', 'description': 'description_value', 'filter': 'filter_value', 'metric_descriptor': {'name': 'name_value', 'type': 'type_value', 'labels': [{'key': 'key_value', 'value_type': 1, 'description': 'description_value'}], 'metric_kind': 1, 'value_type': 1, 'unit': 'unit_value', 'description': 'description_value', 'display_name': 'display_name_value', 'metadata': {'launch_stage': 6, 'sample_period': {'seconds': 751, 'nanos': 543}, 'ingest_delay': {}}, 'launch_stage': 6, 'monitored_resource_types': ['monitored_resource_types_value1', 'monitored_resource_types_value2']}, 
'value_extractor': 'value_extractor_value', 'label_extractors': {}, 'bucket_options': {'linear_buckets': {'num_finite_buckets': 1918, 'width': 0.544, 'offset': 0.647}, 'exponential_buckets': {'num_finite_buckets': 1918, 'growth_factor': 0.1401, 'scale': 0.52}, 'explicit_buckets': {'bounds': [0.652, 0.653]}}, 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'version': 1} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.create_log_metric(request) - - -def test_create_log_metric_rest_flattened(): - client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = logging_metrics.LogMetric() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - metric=logging_metrics.LogMetric(name='name_value'), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = logging_metrics.LogMetric.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.create_log_metric(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{parent=projects/*}/metrics" % client.transport._host, args[1]) - - -def test_create_log_metric_rest_flattened_error(transport: str = 'rest'): - client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.create_log_metric( - logging_metrics.CreateLogMetricRequest(), - parent='parent_value', - metric=logging_metrics.LogMetric(name='name_value'), - ) - - -def test_create_log_metric_rest_error(): - client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - logging_metrics.UpdateLogMetricRequest, - dict, -]) -def test_update_log_metric_rest(request_type): - client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'metric_name': 'projects/sample1/metrics/sample2'} - request_init["metric"] = {'name': 'name_value', 'description': 'description_value', 'filter': 'filter_value', 'metric_descriptor': {'name': 'name_value', 'type': 'type_value', 'labels': [{'key': 'key_value', 'value_type': 1, 'description': 'description_value'}], 'metric_kind': 1, 'value_type': 1, 'unit': 'unit_value', 'description': 'description_value', 'display_name': 'display_name_value', 'metadata': {'launch_stage': 6, 'sample_period': {'seconds': 751, 'nanos': 543}, 'ingest_delay': {}}, 'launch_stage': 6, 'monitored_resource_types': ['monitored_resource_types_value1', 'monitored_resource_types_value2']}, 'value_extractor': 'value_extractor_value', 'label_extractors': {}, 'bucket_options': {'linear_buckets': {'num_finite_buckets': 1918, 'width': 0.544, 'offset': 0.647}, 'exponential_buckets': {'num_finite_buckets': 1918, 'growth_factor': 0.1401, 'scale': 0.52}, 'explicit_buckets': {'bounds': [0.652, 0.653]}}, 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'version': 1} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = logging_metrics.LogMetric( - name='name_value', - description='description_value', - filter='filter_value', - value_extractor='value_extractor_value', - version=logging_metrics.LogMetric.ApiVersion.V1, - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = logging_metrics.LogMetric.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.update_log_metric(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, logging_metrics.LogMetric) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.filter == 'filter_value' - assert response.value_extractor == 'value_extractor_value' - assert response.version == logging_metrics.LogMetric.ApiVersion.V1 - - -def test_update_log_metric_rest_required_fields(request_type=logging_metrics.UpdateLogMetricRequest): - transport_class = transports.MetricsServiceV2RestTransport - - request_init = {} - request_init["metric_name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_log_metric._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["metricName"] = 'metric_name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_log_metric._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "metricName" in jsonified_request - assert jsonified_request["metricName"] == 'metric_name_value' - - client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = logging_metrics.LogMetric() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "put", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = logging_metrics.LogMetric.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.update_log_metric(request) - - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_update_log_metric_rest_unset_required_fields(): - transport = transports.MetricsServiceV2RestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.update_log_metric._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("metricName", "metric", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_log_metric_rest_interceptors(null_interceptor): - transport = transports.MetricsServiceV2RestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.MetricsServiceV2RestInterceptor(), - ) - client = MetricsServiceV2Client(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.MetricsServiceV2RestInterceptor, "post_update_log_metric") as post, \ - mock.patch.object(transports.MetricsServiceV2RestInterceptor, "pre_update_log_metric") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = logging_metrics.UpdateLogMetricRequest.pb(logging_metrics.UpdateLogMetricRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = logging_metrics.LogMetric.to_json(logging_metrics.LogMetric()) - - request = logging_metrics.UpdateLogMetricRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = logging_metrics.LogMetric() - - client.update_log_metric(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_update_log_metric_rest_bad_request(transport: str = 'rest', request_type=logging_metrics.UpdateLogMetricRequest): - client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'metric_name': 'projects/sample1/metrics/sample2'} - request_init["metric"] = {'name': 'name_value', 'description': 'description_value', 'filter': 'filter_value', 'metric_descriptor': {'name': 'name_value', 'type': 'type_value', 'labels': [{'key': 'key_value', 'value_type': 1, 'description': 'description_value'}], 'metric_kind': 1, 'value_type': 1, 'unit': 'unit_value', 'description': 'description_value', 'display_name': 'display_name_value', 'metadata': {'launch_stage': 6, 'sample_period': {'seconds': 751, 'nanos': 543}, 'ingest_delay': {}}, 'launch_stage': 6, 'monitored_resource_types': ['monitored_resource_types_value1', 
'monitored_resource_types_value2']}, 'value_extractor': 'value_extractor_value', 'label_extractors': {}, 'bucket_options': {'linear_buckets': {'num_finite_buckets': 1918, 'width': 0.544, 'offset': 0.647}, 'exponential_buckets': {'num_finite_buckets': 1918, 'growth_factor': 0.1401, 'scale': 0.52}, 'explicit_buckets': {'bounds': [0.652, 0.653]}}, 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'version': 1} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.update_log_metric(request) - - -def test_update_log_metric_rest_flattened(): - client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = logging_metrics.LogMetric() - - # get arguments that satisfy an http rule for this method - sample_request = {'metric_name': 'projects/sample1/metrics/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - metric_name='metric_name_value', - metric=logging_metrics.LogMetric(name='name_value'), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = logging_metrics.LogMetric.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.update_log_metric(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{metric_name=projects/*/metrics/*}" % client.transport._host, args[1]) - - -def test_update_log_metric_rest_flattened_error(transport: str = 'rest'): - client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_log_metric( - logging_metrics.UpdateLogMetricRequest(), - metric_name='metric_name_value', - metric=logging_metrics.LogMetric(name='name_value'), - ) - - -def test_update_log_metric_rest_error(): - client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - logging_metrics.DeleteLogMetricRequest, - dict, -]) -def test_delete_log_metric_rest(request_type): - client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'metric_name': 'projects/sample1/metrics/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.delete_log_metric(request) - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_log_metric_rest_required_fields(request_type=logging_metrics.DeleteLogMetricRequest): - transport_class = transports.MetricsServiceV2RestTransport - - request_init = {} - request_init["metric_name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_log_metric._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["metricName"] = 'metric_name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_log_metric._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "metricName" in jsonified_request - assert jsonified_request["metricName"] == 'metric_name_value' - - client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "delete", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.delete_log_metric(request) - - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_delete_log_metric_rest_unset_required_fields(): - transport = transports.MetricsServiceV2RestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.delete_log_metric._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("metricName", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_log_metric_rest_interceptors(null_interceptor): - transport = transports.MetricsServiceV2RestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.MetricsServiceV2RestInterceptor(), - ) - client = MetricsServiceV2Client(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.MetricsServiceV2RestInterceptor, "pre_delete_log_metric") as pre: - pre.assert_not_called() - pb_message = logging_metrics.DeleteLogMetricRequest.pb(logging_metrics.DeleteLogMetricRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - - request = logging_metrics.DeleteLogMetricRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - - client.delete_log_metric(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - - -def test_delete_log_metric_rest_bad_request(transport: str = 'rest', request_type=logging_metrics.DeleteLogMetricRequest): - client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'metric_name': 'projects/sample1/metrics/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_log_metric(request) - - -def test_delete_log_metric_rest_flattened(): - client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = {'metric_name': 'projects/sample1/metrics/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - metric_name='metric_name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.delete_log_metric(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{metric_name=projects/*/metrics/*}" % client.transport._host, args[1]) - - -def test_delete_log_metric_rest_flattened_error(transport: str = 'rest'): - client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_log_metric( - logging_metrics.DeleteLogMetricRequest(), - metric_name='metric_name_value', - ) - - -def test_delete_log_metric_rest_error(): - client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.MetricsServiceV2GrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. - transport = transports.MetricsServiceV2GrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = MetricsServiceV2Client( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide an api_key and a transport instance. - transport = transports.MetricsServiceV2GrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = MetricsServiceV2Client( - client_options=options, - transport=transport, - ) - - # It is an error to provide an api_key and a credential. - options = mock.Mock() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = MetricsServiceV2Client( - client_options=options, - credentials=ga_credentials.AnonymousCredentials() - ) - - # It is an error to provide scopes and a transport instance. - transport = transports.MetricsServiceV2GrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = MetricsServiceV2Client( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.MetricsServiceV2GrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = MetricsServiceV2Client(transport=transport) - assert client.transport is transport - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. 
- transport = transports.MetricsServiceV2GrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.MetricsServiceV2GrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - -@pytest.mark.parametrize("transport_class", [ - transports.MetricsServiceV2GrpcTransport, - transports.MetricsServiceV2GrpcAsyncIOTransport, - transports.MetricsServiceV2RestTransport, -]) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "rest", -]) -def test_transport_kind(transport_name): - transport = MetricsServiceV2Client.get_transport_class(transport_name)( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert transport.kind == transport_name - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. - client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.MetricsServiceV2GrpcTransport, - ) - -def test_metrics_service_v2_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.MetricsServiceV2Transport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json" - ) - - -def test_metrics_service_v2_base_transport(): - # Instantiate the base transport. - with mock.patch('google.cloud.logging_v2.services.metrics_service_v2.transports.MetricsServiceV2Transport.__init__') as Transport: - Transport.return_value = None - transport = transports.MetricsServiceV2Transport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. 
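# [editor's note — a minimal sketch of the ADC pattern test_transport_adc
# relies on: patching google.auth.default lets a transport construct without
# real credentials; `transport_cls` stands in for any generated transport
# class.]
from unittest import mock

import google.auth
from google.auth import credentials as ga_credentials

def build_with_adc(transport_cls):
    with mock.patch.object(google.auth, "default") as adc:
        # Pretend Application Default Credentials resolved successfully.
        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
        transport = transport_cls()
        adc.assert_called_once()
    return transport
# [end note — diff continues]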
- methods = ( - 'list_log_metrics', - 'get_log_metric', - 'create_log_metric', - 'update_log_metric', - 'delete_log_metric', - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - with pytest.raises(NotImplementedError): - transport.close() - - # Catch all for all remaining methods and properties - remainder = [ - 'kind', - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() def test_metrics_service_v2_base_transport_with_credentials_file(): @@ -3425,7 +2190,6 @@ def test_metrics_service_v2_transport_auth_adc(transport_class): [ transports.MetricsServiceV2GrpcTransport, transports.MetricsServiceV2GrpcAsyncIOTransport, - transports.MetricsServiceV2RestTransport, ], ) def test_metrics_service_v2_transport_auth_gdch_credentials(transport_class): @@ -3526,20 +2290,10 @@ def test_metrics_service_v2_grpc_transport_client_cert_source_for_mtls( private_key=expected_key ) -def test_metrics_service_v2_http_transport_client_cert_source_for_mtls(): - cred = ga_credentials.AnonymousCredentials() - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: - transports.MetricsServiceV2RestTransport ( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) - @pytest.mark.parametrize("transport_name", [ "grpc", "grpc_asyncio", - "rest", ]) def test_metrics_service_v2_host_no_port(transport_name): client = MetricsServiceV2Client( @@ -3549,14 +2303,11 @@ def test_metrics_service_v2_host_no_port(transport_name): ) assert client.transport._host == ( 'logging.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://logging.googleapis.com' ) @pytest.mark.parametrize("transport_name", [ "grpc", "grpc_asyncio", - "rest", ]) def test_metrics_service_v2_host_with_port(transport_name): client = MetricsServiceV2Client( @@ -3566,39 +2317,8 @@ def test_metrics_service_v2_host_with_port(transport_name): ) assert client.transport._host == ( 'logging.googleapis.com:8000' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://logging.googleapis.com:8000' ) -@pytest.mark.parametrize("transport_name", [ - "rest", -]) -def test_metrics_service_v2_client_transport_session_collision(transport_name): - creds1 = ga_credentials.AnonymousCredentials() - creds2 = ga_credentials.AnonymousCredentials() - client1 = MetricsServiceV2Client( - credentials=creds1, - transport=transport_name, - ) - client2 = MetricsServiceV2Client( - credentials=creds2, - transport=transport_name, - ) - session1 = client1.transport.list_log_metrics._session - session2 = client2.transport.list_log_metrics._session - assert session1 != session2 - session1 = client1.transport.get_log_metric._session - session2 = client2.transport.get_log_metric._session - assert session1 != session2 - session1 = client1.transport.create_log_metric._session - session2 = client2.transport.create_log_metric._session - assert session1 != session2 - session1 = client1.transport.update_log_metric._session - session2 = client2.transport.update_log_metric._session - assert session1 != session2 - session1 = client1.transport.delete_log_metric._session - session2 = 
client2.transport.delete_log_metric._session - assert session1 != session2 def test_metrics_service_v2_grpc_transport_channel(): channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) @@ -3686,169 +2406,555 @@ def test_metrics_service_v2_transport_channel_mtls_with_adc( grpc_create_channel.return_value = mock_grpc_channel mock_cred = mock.Mock() - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_log_metric_path(): + project = "squid" + metric = "clam" + expected = "projects/{project}/metrics/{metric}".format(project=project, metric=metric, ) + actual = MetricsServiceV2Client.log_metric_path(project, metric) + assert expected == actual + + +def test_parse_log_metric_path(): + expected = { + "project": "whelk", + "metric": "octopus", + } + path = MetricsServiceV2Client.log_metric_path(**expected) + + # Check that the path construction is reversible. + actual = MetricsServiceV2Client.parse_log_metric_path(path) + assert expected == actual + +def test_common_billing_account_path(): + billing_account = "oyster" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = MetricsServiceV2Client.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "nudibranch", + } + path = MetricsServiceV2Client.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = MetricsServiceV2Client.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "cuttlefish" + expected = "folders/{folder}".format(folder=folder, ) + actual = MetricsServiceV2Client.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "mussel", + } + path = MetricsServiceV2Client.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = MetricsServiceV2Client.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "winkle" + expected = "organizations/{organization}".format(organization=organization, ) + actual = MetricsServiceV2Client.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nautilus", + } + path = MetricsServiceV2Client.common_organization_path(**expected) + + # Check that the path construction is reversible. 
+ actual = MetricsServiceV2Client.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "scallop" + expected = "projects/{project}".format(project=project, ) + actual = MetricsServiceV2Client.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "abalone", + } + path = MetricsServiceV2Client.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = MetricsServiceV2Client.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "squid" + location = "clam" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = MetricsServiceV2Client.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "whelk", + "location": "octopus", + } + path = MetricsServiceV2Client.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = MetricsServiceV2Client.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.MetricsServiceV2Transport, '_prep_wrapped_messages') as prep: + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.MetricsServiceV2Transport, '_prep_wrapped_messages') as prep: + transport_class = MetricsServiceV2Client.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = MetricsServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_cancel_operation(transport: str = "grpc"): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc"): + client = MetricsServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
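# [editor's note — the path-helper tests above all assert the same round-trip
# property; a minimal sketch, assuming the logging_v2 import path used
# elsewhere in this test file:]
from google.cloud.logging_v2.services.metrics_service_v2 import (
    MetricsServiceV2Client,
)

expected = {"project": "my-project", "location": "us-central1"}
path = MetricsServiceV2Client.common_location_path(**expected)
# Parsing the constructed path must recover the original components.
assert MetricsServiceV2Client.parse_common_location_path(path) == expected
# [end note — diff continues]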
+ request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + +def test_cancel_operation_field_headers(): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = MetricsServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_cancel_operation_from_dict(): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = MetricsServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
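# [editor's note — sketch of the routing-header convention the field-header
# tests verify: the request's `name` is mirrored into gRPC metadata under
# x-goog-request-params so the backend can route the call.]
from google.api_core import gapic_v1

metadata = gapic_v1.routing_header.to_grpc_metadata((("name", "locations"),))
assert ("x-goog-request-params", "name=locations") in metadata
# [end note — diff continues]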
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_operation(transport: str = "grpc"): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request -def test_log_metric_path(): - project = "squid" - metric = "clam" - expected = "projects/{project}/metrics/{metric}".format(project=project, metric=metric, ) - actual = MetricsServiceV2Client.log_metric_path(project, metric) - assert expected == actual + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc"): + client = MetricsServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() -def test_parse_log_metric_path(): - expected = { - "project": "whelk", - "metric": "octopus", - } - path = MetricsServiceV2Client.log_metric_path(**expected) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request - # Check that the path construction is reversible. - actual = MetricsServiceV2Client.parse_log_metric_path(path) - assert expected == actual + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) -def test_common_billing_account_path(): - billing_account = "oyster" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - actual = MetricsServiceV2Client.common_billing_account_path(billing_account) - assert expected == actual +def test_get_operation_field_headers(): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. 
Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "nudibranch", - } - path = MetricsServiceV2Client.common_billing_account_path(**expected) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() - # Check that the path construction is reversible. - actual = MetricsServiceV2Client.parse_common_billing_account_path(path) - assert expected == actual + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request -def test_common_folder_path(): - folder = "cuttlefish" - expected = "folders/{folder}".format(folder=folder, ) - actual = MetricsServiceV2Client.common_folder_path(folder) - assert expected == actual + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = MetricsServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" -def test_parse_common_folder_path(): - expected = { - "folder": "mussel", - } - path = MetricsServiceV2Client.common_folder_path(**expected) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request - # Check that the path construction is reversible. - actual = MetricsServiceV2Client.parse_common_folder_path(path) - assert expected == actual + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -def test_common_organization_path(): - organization = "winkle" - expected = "organizations/{organization}".format(organization=organization, ) - actual = MetricsServiceV2Client.common_organization_path(organization) - assert expected == actual +def test_get_operation_from_dict(): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = MetricsServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() -def test_parse_common_organization_path(): - expected = { - "organization": "nautilus", - } - path = MetricsServiceV2Client.common_organization_path(**expected) +def test_list_operations(transport: str = "grpc"): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) - # Check that the path construction is reversible. - actual = MetricsServiceV2Client.parse_common_organization_path(path) - assert expected == actual + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() -def test_common_project_path(): - project = "scallop" - expected = "projects/{project}".format(project=project, ) - actual = MetricsServiceV2Client.common_project_path(project) - assert expected == actual + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc"): + client = MetricsServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) -def test_parse_common_project_path(): - expected = { - "project": "abalone", - } - path = MetricsServiceV2Client.common_project_path(**expected) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() - # Check that the path construction is reversible. - actual = MetricsServiceV2Client.parse_common_project_path(path) - assert expected == actual + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request -def test_common_location_path(): - project = "squid" - location = "clam" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) - actual = MetricsServiceV2Client.common_location_path(project, location) - assert expected == actual + # Establish that the response is the type that we expect. 
+ assert isinstance(response, operations_pb2.ListOperationsResponse) +def test_list_operations_field_headers(): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) -def test_parse_common_location_path(): - expected = { - "project": "whelk", - "location": "octopus", - } - path = MetricsServiceV2Client.common_location_path(**expected) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" - # Check that the path construction is reversible. - actual = MetricsServiceV2Client.parse_common_location_path(path) - assert expected == actual + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = MetricsServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) - with mock.patch.object(transports.MetricsServiceV2Transport, '_prep_wrapped_messages') as prep: - client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" - with mock.patch.object(transports.MetricsServiceV2Transport, '_prep_wrapped_messages') as prep: - transport_class = MetricsServiceV2Client.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() ) - prep.assert_called_once_with(client_info) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_list_operations_from_dict(): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() @pytest.mark.asyncio -async def test_transport_close_async(): +async def test_list_operations_from_dict_async(): client = MetricsServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", ) - with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: - async with client: - close.assert_not_called() - close.assert_called_once() + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() def test_transport_close(): transports = { - "rest": "_session", "grpc": "_grpc_channel", } @@ -3864,7 +2970,6 @@ def test_transport_close(): def test_client_ctx(): transports = [ - 'rest', 'grpc', ] for transport in transports: diff --git a/tests/integration/goldens/redis/docs/_static/custom.css b/tests/integration/goldens/redis/docs/_static/custom.css new file mode 100755 index 0000000000..06423be0b5 --- /dev/null +++ b/tests/integration/goldens/redis/docs/_static/custom.css @@ -0,0 +1,3 @@ +dl.field-list > dt { + min-width: 100px +} diff --git a/tests/integration/goldens/redis/docs/conf.py b/tests/integration/goldens/redis/docs/conf.py index f4c7ab4631..b95d38e026 100755 --- a/tests/integration/goldens/redis/docs/conf.py +++ b/tests/integration/goldens/redis/docs/conf.py @@ -96,7 +96,7 @@ # # This is also used if you do content translation via gettext catalogs. # Usually you set "language" from the command line for these cases. 
-language = None +language = 'en' # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: diff --git a/tests/integration/goldens/redis/google/cloud/redis/__init__.py b/tests/integration/goldens/redis/google/cloud/redis/__init__.py index 71f3599d0a..92a9215362 100755 --- a/tests/integration/goldens/redis/google/cloud/redis/__init__.py +++ b/tests/integration/goldens/redis/google/cloud/redis/__init__.py @@ -27,17 +27,26 @@ from google.cloud.redis_v1.types.cloud_redis import FailoverInstanceRequest from google.cloud.redis_v1.types.cloud_redis import GcsDestination from google.cloud.redis_v1.types.cloud_redis import GcsSource +from google.cloud.redis_v1.types.cloud_redis import GetInstanceAuthStringRequest from google.cloud.redis_v1.types.cloud_redis import GetInstanceRequest from google.cloud.redis_v1.types.cloud_redis import ImportInstanceRequest from google.cloud.redis_v1.types.cloud_redis import InputConfig from google.cloud.redis_v1.types.cloud_redis import Instance +from google.cloud.redis_v1.types.cloud_redis import InstanceAuthString from google.cloud.redis_v1.types.cloud_redis import ListInstancesRequest from google.cloud.redis_v1.types.cloud_redis import ListInstancesResponse from google.cloud.redis_v1.types.cloud_redis import LocationMetadata +from google.cloud.redis_v1.types.cloud_redis import MaintenancePolicy +from google.cloud.redis_v1.types.cloud_redis import MaintenanceSchedule +from google.cloud.redis_v1.types.cloud_redis import NodeInfo from google.cloud.redis_v1.types.cloud_redis import OperationMetadata from google.cloud.redis_v1.types.cloud_redis import OutputConfig +from google.cloud.redis_v1.types.cloud_redis import PersistenceConfig +from google.cloud.redis_v1.types.cloud_redis import RescheduleMaintenanceRequest +from google.cloud.redis_v1.types.cloud_redis import TlsCertificate from google.cloud.redis_v1.types.cloud_redis import UpdateInstanceRequest from google.cloud.redis_v1.types.cloud_redis import UpgradeInstanceRequest +from google.cloud.redis_v1.types.cloud_redis import WeeklyMaintenanceWindow from google.cloud.redis_v1.types.cloud_redis import ZoneMetadata __all__ = ('CloudRedisClient', @@ -48,16 +57,25 @@ 'FailoverInstanceRequest', 'GcsDestination', 'GcsSource', + 'GetInstanceAuthStringRequest', 'GetInstanceRequest', 'ImportInstanceRequest', 'InputConfig', 'Instance', + 'InstanceAuthString', 'ListInstancesRequest', 'ListInstancesResponse', 'LocationMetadata', + 'MaintenancePolicy', + 'MaintenanceSchedule', + 'NodeInfo', 'OperationMetadata', 'OutputConfig', + 'PersistenceConfig', + 'RescheduleMaintenanceRequest', + 'TlsCertificate', 'UpdateInstanceRequest', 'UpgradeInstanceRequest', + 'WeeklyMaintenanceWindow', 'ZoneMetadata', ) diff --git a/tests/integration/goldens/redis/google/cloud/redis_v1/__init__.py b/tests/integration/goldens/redis/google/cloud/redis_v1/__init__.py index a3273e7a5f..0d4ccb8a35 100755 --- a/tests/integration/goldens/redis/google/cloud/redis_v1/__init__.py +++ b/tests/integration/goldens/redis/google/cloud/redis_v1/__init__.py @@ -27,17 +27,26 @@ from .types.cloud_redis import FailoverInstanceRequest from .types.cloud_redis import GcsDestination from .types.cloud_redis import GcsSource +from .types.cloud_redis import GetInstanceAuthStringRequest from .types.cloud_redis import GetInstanceRequest from .types.cloud_redis import ImportInstanceRequest from .types.cloud_redis import InputConfig from .types.cloud_redis import Instance +from .types.cloud_redis import InstanceAuthString 
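# [editor's note — the newly exported request types are ordinary proto-plus
# messages; a minimal usage sketch with a placeholder resource name:]
from google.cloud import redis_v1

request = redis_v1.GetInstanceAuthStringRequest(
    name="projects/my-project/locations/us-central1/instances/my-instance",
)
# [end note — diff continues]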
from .types.cloud_redis import ListInstancesRequest from .types.cloud_redis import ListInstancesResponse from .types.cloud_redis import LocationMetadata +from .types.cloud_redis import MaintenancePolicy +from .types.cloud_redis import MaintenanceSchedule +from .types.cloud_redis import NodeInfo from .types.cloud_redis import OperationMetadata from .types.cloud_redis import OutputConfig +from .types.cloud_redis import PersistenceConfig +from .types.cloud_redis import RescheduleMaintenanceRequest +from .types.cloud_redis import TlsCertificate from .types.cloud_redis import UpdateInstanceRequest from .types.cloud_redis import UpgradeInstanceRequest +from .types.cloud_redis import WeeklyMaintenanceWindow from .types.cloud_redis import ZoneMetadata __all__ = ( @@ -49,16 +58,25 @@ 'FailoverInstanceRequest', 'GcsDestination', 'GcsSource', +'GetInstanceAuthStringRequest', 'GetInstanceRequest', 'ImportInstanceRequest', 'InputConfig', 'Instance', +'InstanceAuthString', 'ListInstancesRequest', 'ListInstancesResponse', 'LocationMetadata', +'MaintenancePolicy', +'MaintenanceSchedule', +'NodeInfo', 'OperationMetadata', 'OutputConfig', +'PersistenceConfig', +'RescheduleMaintenanceRequest', +'TlsCertificate', 'UpdateInstanceRequest', 'UpgradeInstanceRequest', +'WeeklyMaintenanceWindow', 'ZoneMetadata', ) diff --git a/tests/integration/goldens/redis/google/cloud/redis_v1/gapic_metadata.json b/tests/integration/goldens/redis/google/cloud/redis_v1/gapic_metadata.json index 804956f477..202306de2c 100755 --- a/tests/integration/goldens/redis/google/cloud/redis_v1/gapic_metadata.json +++ b/tests/integration/goldens/redis/google/cloud/redis_v1/gapic_metadata.json @@ -35,6 +35,11 @@ "get_instance" ] }, + "GetInstanceAuthString": { + "methods": [ + "get_instance_auth_string" + ] + }, "ImportInstance": { "methods": [ "import_instance" @@ -45,6 +50,11 @@ "list_instances" ] }, + "RescheduleMaintenance": { + "methods": [ + "reschedule_maintenance" + ] + }, "UpdateInstance": { "methods": [ "update_instance" @@ -85,6 +95,11 @@ "get_instance" ] }, + "GetInstanceAuthString": { + "methods": [ + "get_instance_auth_string" + ] + }, "ImportInstance": { "methods": [ "import_instance" @@ -95,6 +110,11 @@ "list_instances" ] }, + "RescheduleMaintenance": { + "methods": [ + "reschedule_maintenance" + ] + }, "UpdateInstance": { "methods": [ "update_instance" @@ -135,6 +155,11 @@ "get_instance" ] }, + "GetInstanceAuthString": { + "methods": [ + "get_instance_auth_string" + ] + }, "ImportInstance": { "methods": [ "import_instance" @@ -145,6 +170,11 @@ "list_instances" ] }, + "RescheduleMaintenance": { + "methods": [ + "reschedule_maintenance" + ] + }, "UpdateInstance": { "methods": [ "update_instance" diff --git a/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py b/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py index 976bb3bd71..909c48d2db 100755 --- a/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py +++ b/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py @@ -34,8 +34,10 @@ from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore +from google.cloud.location import locations_pb2 # type: ignore from google.cloud.redis_v1.services.cloud_redis import pagers from google.cloud.redis_v1.types import cloud_redis +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: 
ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore @@ -391,7 +393,7 @@ async def sample_get_instance(): Returns: google.cloud.redis_v1.types.Instance: - A Google Cloud Redis instance. + A Memorystore for Redis instance. """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have @@ -435,6 +437,109 @@ async def sample_get_instance(): # Done; return the response. return response + async def get_instance_auth_string(self, + request: Optional[Union[cloud_redis.GetInstanceAuthStringRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloud_redis.InstanceAuthString: + r"""Gets the AUTH string for a Redis instance. If AUTH is + not enabled for the instance the response will be empty. + This information is not included in the details returned + to GetInstance. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import redis_v1 + + async def sample_get_instance_auth_string(): + # Create a client + client = redis_v1.CloudRedisAsyncClient() + + # Initialize request argument(s) + request = redis_v1.GetInstanceAuthStringRequest( + name="name_value", + ) + + # Make the request + response = await client.get_instance_auth_string(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.redis_v1.types.GetInstanceAuthStringRequest, dict]]): + The request object. Request for + [GetInstanceAuthString][google.cloud.redis.v1.CloudRedis.GetInstanceAuthString]. + name (:class:`str`): + Required. Redis instance resource name using the form: + ``projects/{project_id}/locations/{location_id}/instances/{instance_id}`` + where ``location_id`` refers to a GCP region. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.redis_v1.types.InstanceAuthString: + Instance AUTH string details. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = cloud_redis.GetInstanceAuthStringRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
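# [editor's note — sketch of the request-vs-flattened-arguments guard that
# opens every generated method above: callers may pass a request object or
# individual field arguments, never both; `check_mutually_exclusive` is a
# hypothetical distillation, not the generator's actual helper.]
def check_mutually_exclusive(request, **flattened_fields):
    if request is not None and any(
        value is not None for value in flattened_fields.values()
    ):
        raise ValueError(
            "If the `request` argument is set, then none of "
            "the individual field arguments should be set."
        )
# [end note — diff continues]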
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_instance_auth_string, + default_timeout=600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + async def create_instance(self, request: Optional[Union[cloud_redis.CreateInstanceRequest, dict]] = None, *, @@ -540,8 +645,8 @@ async def sample_create_instance(): An object representing a long-running operation. The result type for the operation will be - :class:`google.cloud.redis_v1.types.Instance` A Google - Cloud Redis instance. + :class:`google.cloud.redis_v1.types.Instance` A + Memorystore for Redis instance. """ # Create or coerce a protobuf request object. @@ -663,6 +768,7 @@ async def sample_update_instance(): - ``labels`` - ``memorySizeGb`` - ``redisConfig`` + - ``replica_count`` This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this @@ -685,8 +791,8 @@ async def sample_update_instance(): An object representing a long-running operation. The result type for the operation will be - :class:`google.cloud.redis_v1.types.Instance` A Google - Cloud Redis instance. + :class:`google.cloud.redis_v1.types.Instance` A + Memorystore for Redis instance. """ # Create or coerce a protobuf request object. @@ -814,8 +920,8 @@ async def sample_upgrade_instance(): An object representing a long-running operation. The result type for the operation will be - :class:`google.cloud.redis_v1.types.Instance` A Google - Cloud Redis instance. + :class:`google.cloud.redis_v1.types.Instance` A + Memorystore for Redis instance. """ # Create or coerce a protobuf request object. @@ -953,8 +1059,8 @@ async def sample_import_instance(): An object representing a long-running operation. The result type for the operation will be - :class:`google.cloud.redis_v1.types.Instance` A Google - Cloud Redis instance. + :class:`google.cloud.redis_v1.types.Instance` A + Memorystore for Redis instance. """ # Create or coerce a protobuf request object. @@ -1021,6 +1127,7 @@ async def export_instance(self, r"""Export Redis instance data into a Redis RDB format file in Cloud Storage. Redis will continue serving during this operation. + The returned operation is automatically deleted after a few hours, so there is no need to call DeleteOperation. @@ -1088,8 +1195,8 @@ async def sample_export_instance(): An object representing a long-running operation. The result type for the operation will be - :class:`google.cloud.redis_v1.types.Instance` A Google - Cloud Redis instance. + :class:`google.cloud.redis_v1.types.Instance` A + Memorystore for Redis instance. """ # Create or coerce a protobuf request object. @@ -1153,7 +1260,7 @@ async def failover_instance(self, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: - r"""Initiates a failover of the master node to current + r"""Initiates a failover of the primary node to current replica node for a specific STANDARD tier Cloud Memorystore for Redis instance. @@ -1218,8 +1325,8 @@ async def sample_failover_instance(): An object representing a long-running operation. 
The result type for the operation will be - :class:`google.cloud.redis_v1.types.Instance` A Google - Cloud Redis instance. + :class:`google.cloud.redis_v1.types.Instance` A + Memorystore for Redis instance. """ # Create or coerce a protobuf request object. @@ -1399,6 +1506,451 @@ async def sample_delete_instance(): # Done; return the response. return response + async def reschedule_maintenance(self, + request: Optional[Union[cloud_redis.RescheduleMaintenanceRequest, dict]] = None, + *, + name: Optional[str] = None, + reschedule_type: Optional[cloud_redis.RescheduleMaintenanceRequest.RescheduleType] = None, + schedule_time: Optional[timestamp_pb2.Timestamp] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Reschedule maintenance for a given instance in a + given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import redis_v1 + + async def sample_reschedule_maintenance(): + # Create a client + client = redis_v1.CloudRedisAsyncClient() + + # Initialize request argument(s) + request = redis_v1.RescheduleMaintenanceRequest( + name="name_value", + reschedule_type="SPECIFIC_TIME", + ) + + # Make the request + operation = client.reschedule_maintenance(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.redis_v1.types.RescheduleMaintenanceRequest, dict]]): + The request object. Request for + [RescheduleMaintenance][google.cloud.redis.v1.CloudRedis.RescheduleMaintenance]. + name (:class:`str`): + Required. Redis instance resource name using the form: + ``projects/{project_id}/locations/{location_id}/instances/{instance_id}`` + where ``location_id`` refers to a GCP region. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + reschedule_type (:class:`google.cloud.redis_v1.types.RescheduleMaintenanceRequest.RescheduleType`): + Required. If reschedule type is SPECIFIC_TIME, must set + up schedule_time as well. + + This corresponds to the ``reschedule_type`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + schedule_time (:class:`google.protobuf.timestamp_pb2.Timestamp`): + Optional. Timestamp when the maintenance shall be + rescheduled to if reschedule_type=SPECIFIC_TIME, in RFC + 3339 format, for example ``2012-11-15T16:19:00.094Z``. + + This corresponds to the ``schedule_time`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. 
+ + The result type for the operation will be + :class:`google.cloud.redis_v1.types.Instance` A + Memorystore for Redis instance. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, reschedule_type, schedule_time]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = cloud_redis.RescheduleMaintenanceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if reschedule_type is not None: + request.reschedule_type = reschedule_type + if schedule_time is not None: + request.schedule_time = schedule_time + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.reschedule_maintenance, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + cloud_redis.Instance, + metadata_type=cloud_redis.OperationMetadata, + ) + + # Done; return the response. + return response + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
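# [editor's note — the operations/location mixins accept plain dicts because
# operations_pb2 messages are not proto-plus wrapped; a sketch of the
# keyword-expansion coercion used above:]
from google.longrunning import operations_pb2

def coerce_list_operations_request(request):
    # A dict is expanded into the raw protobuf type; anything else is assumed
    # to already be an operations_pb2.ListOperationsRequest.
    if isinstance(request, dict):
        request = operations_pb2.ListOperationsRequest(**request)
    return request
# [end note — diff continues]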
+ return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.delete_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. 
+ await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + async def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.get_location, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
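# [editor's note — sketch of what gapic_v1.method.wrap_method does for these
# mixins: it layers default retry/timeout handling over a bare transport
# callable; `echo` is a hypothetical stand-in for a transport method.]
from google.api_core import gapic_v1

def echo(request, retry=None, timeout=None, metadata=()):
    return request

rpc = gapic_v1.method.wrap_method(echo, default_timeout=None)
# [end note — diff continues]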
+ return response + + async def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.list_locations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + async def __aenter__(self) -> "CloudRedisAsyncClient": return self diff --git a/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py b/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py index 414d578bfe..452651485c 100755 --- a/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py +++ b/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py @@ -37,8 +37,10 @@ from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore +from google.cloud.location import locations_pb2 # type: ignore from google.cloud.redis_v1.services.cloud_redis import pagers from google.cloud.redis_v1.types import cloud_redis +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore @@ -585,7 +587,7 @@ def sample_get_instance(): Returns: google.cloud.redis_v1.types.Instance: - A Google Cloud Redis instance. + A Memorystore for Redis instance. """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have @@ -629,6 +631,109 @@ def sample_get_instance(): # Done; return the response. return response + def get_instance_auth_string(self, + request: Optional[Union[cloud_redis.GetInstanceAuthStringRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloud_redis.InstanceAuthString: + r"""Gets the AUTH string for a Redis instance. 
If AUTH is + not enabled for the instance the response will be empty. + This information is not included in the details returned + to GetInstance. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import redis_v1 + + def sample_get_instance_auth_string(): + # Create a client + client = redis_v1.CloudRedisClient() + + # Initialize request argument(s) + request = redis_v1.GetInstanceAuthStringRequest( + name="name_value", + ) + + # Make the request + response = client.get_instance_auth_string(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.redis_v1.types.GetInstanceAuthStringRequest, dict]): + The request object. Request for + [GetInstanceAuthString][google.cloud.redis.v1.CloudRedis.GetInstanceAuthString]. + name (str): + Required. Redis instance resource name using the form: + ``projects/{project_id}/locations/{location_id}/instances/{instance_id}`` + where ``location_id`` refers to a GCP region. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.redis_v1.types.InstanceAuthString: + Instance AUTH string details. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a cloud_redis.GetInstanceAuthStringRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloud_redis.GetInstanceAuthStringRequest): + request = cloud_redis.GetInstanceAuthStringRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_instance_auth_string] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + def create_instance(self, request: Optional[Union[cloud_redis.CreateInstanceRequest, dict]] = None, *, @@ -734,8 +839,8 @@ def sample_create_instance(): An object representing a long-running operation. 
The result type for the operation will be - :class:`google.cloud.redis_v1.types.Instance` A Google - Cloud Redis instance. + :class:`google.cloud.redis_v1.types.Instance` A + Memorystore for Redis instance. """ # Create or coerce a protobuf request object. @@ -857,6 +962,7 @@ def sample_update_instance(): - ``labels`` - ``memorySizeGb`` - ``redisConfig`` + - ``replica_count`` This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this @@ -879,8 +985,8 @@ def sample_update_instance(): An object representing a long-running operation. The result type for the operation will be - :class:`google.cloud.redis_v1.types.Instance` A Google - Cloud Redis instance. + :class:`google.cloud.redis_v1.types.Instance` A + Memorystore for Redis instance. """ # Create or coerce a protobuf request object. @@ -1008,8 +1114,8 @@ def sample_upgrade_instance(): An object representing a long-running operation. The result type for the operation will be - :class:`google.cloud.redis_v1.types.Instance` A Google - Cloud Redis instance. + :class:`google.cloud.redis_v1.types.Instance` A + Memorystore for Redis instance. """ # Create or coerce a protobuf request object. @@ -1147,8 +1253,8 @@ def sample_import_instance(): An object representing a long-running operation. The result type for the operation will be - :class:`google.cloud.redis_v1.types.Instance` A Google - Cloud Redis instance. + :class:`google.cloud.redis_v1.types.Instance` A + Memorystore for Redis instance. """ # Create or coerce a protobuf request object. @@ -1215,6 +1321,7 @@ def export_instance(self, r"""Export Redis instance data into a Redis RDB format file in Cloud Storage. Redis will continue serving during this operation. + The returned operation is automatically deleted after a few hours, so there is no need to call DeleteOperation. @@ -1282,8 +1389,8 @@ def sample_export_instance(): An object representing a long-running operation. The result type for the operation will be - :class:`google.cloud.redis_v1.types.Instance` A Google - Cloud Redis instance. + :class:`google.cloud.redis_v1.types.Instance` A + Memorystore for Redis instance. """ # Create or coerce a protobuf request object. @@ -1347,7 +1454,7 @@ def failover_instance(self, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: - r"""Initiates a failover of the master node to current + r"""Initiates a failover of the primary node to current replica node for a specific STANDARD tier Cloud Memorystore for Redis instance. @@ -1412,8 +1519,8 @@ def sample_failover_instance(): An object representing a long-running operation. The result type for the operation will be - :class:`google.cloud.redis_v1.types.Instance` A Google - Cloud Redis instance. + :class:`google.cloud.redis_v1.types.Instance` A + Memorystore for Redis instance. """ # Create or coerce a protobuf request object. @@ -1593,6 +1700,146 @@ def sample_delete_instance(): # Done; return the response. 
return response + def reschedule_maintenance(self, + request: Optional[Union[cloud_redis.RescheduleMaintenanceRequest, dict]] = None, + *, + name: Optional[str] = None, + reschedule_type: Optional[cloud_redis.RescheduleMaintenanceRequest.RescheduleType] = None, + schedule_time: Optional[timestamp_pb2.Timestamp] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Reschedule maintenance for a given instance in a + given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import redis_v1 + + def sample_reschedule_maintenance(): + # Create a client + client = redis_v1.CloudRedisClient() + + # Initialize request argument(s) + request = redis_v1.RescheduleMaintenanceRequest( + name="name_value", + reschedule_type="SPECIFIC_TIME", + ) + + # Make the request + operation = client.reschedule_maintenance(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.redis_v1.types.RescheduleMaintenanceRequest, dict]): + The request object. Request for + [RescheduleMaintenance][google.cloud.redis.v1.CloudRedis.RescheduleMaintenance]. + name (str): + Required. Redis instance resource name using the form: + ``projects/{project_id}/locations/{location_id}/instances/{instance_id}`` + where ``location_id`` refers to a GCP region. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + reschedule_type (google.cloud.redis_v1.types.RescheduleMaintenanceRequest.RescheduleType): + Required. If reschedule type is SPECIFIC_TIME, must set + up schedule_time as well. + + This corresponds to the ``reschedule_type`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + schedule_time (google.protobuf.timestamp_pb2.Timestamp): + Optional. Timestamp when the maintenance shall be + rescheduled to if reschedule_type=SPECIFIC_TIME, in RFC + 3339 format, for example ``2012-11-15T16:19:00.094Z``. + + This corresponds to the ``schedule_time`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.redis_v1.types.Instance` A + Memorystore for Redis instance. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name, reschedule_type, schedule_time]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a cloud_redis.RescheduleMaintenanceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloud_redis.RescheduleMaintenanceRequest): + request = cloud_redis.RescheduleMaintenanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if reschedule_type is not None: + request.reschedule_type = reschedule_type + if schedule_time is not None: + request.schedule_time = schedule_time + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.reschedule_maintenance] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + cloud_redis.Instance, + metadata_type=cloud_redis.OperationMetadata, + ) + + # Done; return the response. + return response + def __enter__(self) -> "CloudRedisClient": return self @@ -1606,10 +1853,310 @@ def __exit__(self, type, value, traceback): """ self.transport.close() + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.delete_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. 
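+        # As in the async client, DeleteOperation yields google.protobuf.Empty; nothing is returned.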
+ rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + # Send the request. + rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_location, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_locations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) diff --git a/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py b/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py index 19c96cc87f..c0a9d6bc09 100755 --- a/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py +++ b/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py @@ -27,8 +27,9 @@ from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore +from google.cloud.location import locations_pb2 # type: ignore from google.cloud.redis_v1.types import cloud_redis -from google.longrunning import operations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) @@ -125,6 +126,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=600.0, client_info=client_info, ), + self.get_instance_auth_string: gapic_v1.method.wrap_method( + self.get_instance_auth_string, + default_timeout=600.0, + client_info=client_info, + ), self.create_instance: gapic_v1.method.wrap_method( self.create_instance, default_timeout=600.0, @@ -160,6 +166,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=600.0, client_info=client_info, ), + self.reschedule_maintenance: gapic_v1.method.wrap_method( + self.reschedule_maintenance, + default_timeout=None, + client_info=client_info, + ), } def close(self): @@ -194,6 +205,15 @@ def get_instance(self) -> Callable[ ]]: raise NotImplementedError() + @property + def get_instance_auth_string(self) -> Callable[ + 
[cloud_redis.GetInstanceAuthStringRequest], + Union[ + cloud_redis.InstanceAuthString, + Awaitable[cloud_redis.InstanceAuthString] + ]]: + raise NotImplementedError() + @property def create_instance(self) -> Callable[ [cloud_redis.CreateInstanceRequest], @@ -257,6 +277,67 @@ def delete_instance(self) -> Callable[ ]]: raise NotImplementedError() + @property + def reschedule_maintenance(self) -> Callable[ + [cloud_redis.RescheduleMaintenanceRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[operations_pb2.ListOperationsResponse, Awaitable[operations_pb2.ListOperationsResponse]], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> Callable[ + [operations_pb2.CancelOperationRequest], + None, + ]: + raise NotImplementedError() + + @property + def delete_operation( + self, + ) -> Callable[ + [operations_pb2.DeleteOperationRequest], + None, + ]: + raise NotImplementedError() + + @property + def get_location(self, + ) -> Callable[ + [locations_pb2.GetLocationRequest], + Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], + ]: + raise NotImplementedError() + + @property + def list_locations(self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], + Union[locations_pb2.ListLocationsResponse, Awaitable[locations_pb2.ListLocationsResponse]], + ]: + raise NotImplementedError() + @property def kind(self) -> str: raise NotImplementedError() diff --git a/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py b/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py index 3c22cd0683..7e07797c70 100755 --- a/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py +++ b/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py @@ -25,8 +25,9 @@ import grpc # type: ignore +from google.cloud.location import locations_pb2 # type: ignore from google.cloud.redis_v1.types import cloud_redis -from google.longrunning import operations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore from .base import CloudRedisTransport, DEFAULT_CLIENT_INFO @@ -325,6 +326,35 @@ def get_instance(self) -> Callable[ ) return self._stubs['get_instance'] + @property + def get_instance_auth_string(self) -> Callable[ + [cloud_redis.GetInstanceAuthStringRequest], + cloud_redis.InstanceAuthString]: + r"""Return a callable for the get instance auth string method over gRPC. + + Gets the AUTH string for a Redis instance. If AUTH is + not enabled for the instance the response will be empty. + This information is not included in the details returned + to GetInstance. + + Returns: + Callable[[~.GetInstanceAuthStringRequest], + ~.InstanceAuthString]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'get_instance_auth_string' not in self._stubs: + self._stubs['get_instance_auth_string'] = self.grpc_channel.unary_unary( + '/google.cloud.redis.v1.CloudRedis/GetInstanceAuthString', + request_serializer=cloud_redis.GetInstanceAuthStringRequest.serialize, + response_deserializer=cloud_redis.InstanceAuthString.deserialize, + ) + return self._stubs['get_instance_auth_string'] + @property def create_instance(self) -> Callable[ [cloud_redis.CreateInstanceRequest], @@ -465,6 +495,7 @@ def export_instance(self) -> Callable[ Export Redis instance data into a Redis RDB format file in Cloud Storage. Redis will continue serving during this operation. + The returned operation is automatically deleted after a few hours, so there is no need to call DeleteOperation. @@ -492,7 +523,7 @@ def failover_instance(self) -> Callable[ operations_pb2.Operation]: r"""Return a callable for the failover instance method over gRPC. - Initiates a failover of the master node to current + Initiates a failover of the primary node to current replica node for a specific STANDARD tier Cloud Memorystore for Redis instance. @@ -541,9 +572,144 @@ def delete_instance(self) -> Callable[ ) return self._stubs['delete_instance'] + @property + def reschedule_maintenance(self) -> Callable[ + [cloud_redis.RescheduleMaintenanceRequest], + operations_pb2.Operation]: + r"""Return a callable for the reschedule maintenance method over gRPC. + + Reschedule maintenance for a given instance in a + given project and location. + + Returns: + Callable[[~.RescheduleMaintenanceRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'reschedule_maintenance' not in self._stubs: + self._stubs['reschedule_maintenance'] = self.grpc_channel.unary_unary( + '/google.cloud.redis.v1.CloudRedis/RescheduleMaintenance', + request_serializer=cloud_redis.RescheduleMaintenanceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['reschedule_maintenance'] + def close(self): self.grpc_channel.close() + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: + r"""Return a callable for the list_operations method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def list_locations( + self, + ) -> Callable[[locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse]: + r"""Return a callable for the list locations method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the list locations method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_location" not in self._stubs: + self._stubs["get_location"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + @property def kind(self) -> str: return "grpc" diff --git a/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py b/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py index 806b87287f..354d27092a 100755 --- a/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py +++ b/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py @@ -25,8 +25,9 @@ import grpc # type: ignore from grpc.experimental import aio # type: ignore +from google.cloud.location import locations_pb2 # type: ignore from google.cloud.redis_v1.types import cloud_redis -from google.longrunning import operations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore from .base import CloudRedisTransport, DEFAULT_CLIENT_INFO from .grpc import CloudRedisGrpcTransport @@ -328,6 +329,35 @@ def get_instance(self) -> Callable[ ) return self._stubs['get_instance'] + @property + def get_instance_auth_string(self) -> Callable[ + [cloud_redis.GetInstanceAuthStringRequest], + Awaitable[cloud_redis.InstanceAuthString]]: + r"""Return a callable for the get instance auth string method over gRPC. + + Gets the AUTH string for a Redis instance. If AUTH is + not enabled for the instance the response will be empty. + This information is not included in the details returned + to GetInstance. + + Returns: + Callable[[~.GetInstanceAuthStringRequest], + Awaitable[~.InstanceAuthString]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_instance_auth_string' not in self._stubs: + self._stubs['get_instance_auth_string'] = self.grpc_channel.unary_unary( + '/google.cloud.redis.v1.CloudRedis/GetInstanceAuthString', + request_serializer=cloud_redis.GetInstanceAuthStringRequest.serialize, + response_deserializer=cloud_redis.InstanceAuthString.deserialize, + ) + return self._stubs['get_instance_auth_string'] + @property def create_instance(self) -> Callable[ [cloud_redis.CreateInstanceRequest], @@ -468,6 +498,7 @@ def export_instance(self) -> Callable[ Export Redis instance data into a Redis RDB format file in Cloud Storage. Redis will continue serving during this operation. + The returned operation is automatically deleted after a few hours, so there is no need to call DeleteOperation. @@ -495,7 +526,7 @@ def failover_instance(self) -> Callable[ Awaitable[operations_pb2.Operation]]: r"""Return a callable for the failover instance method over gRPC. - Initiates a failover of the master node to current + Initiates a failover of the primary node to current replica node for a specific STANDARD tier Cloud Memorystore for Redis instance. 
@@ -544,9 +575,144 @@ def delete_instance(self) -> Callable[ ) return self._stubs['delete_instance'] + @property + def reschedule_maintenance(self) -> Callable[ + [cloud_redis.RescheduleMaintenanceRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the reschedule maintenance method over gRPC. + + Reschedule maintenance for a given instance in a + given project and location. + + Returns: + Callable[[~.RescheduleMaintenanceRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'reschedule_maintenance' not in self._stubs: + self._stubs['reschedule_maintenance'] = self.grpc_channel.unary_unary( + '/google.cloud.redis.v1.CloudRedis/RescheduleMaintenance', + request_serializer=cloud_redis.RescheduleMaintenanceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['reschedule_maintenance'] + def close(self): return self.grpc_channel.close() + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: + r"""Return a callable for the list_operations method over gRPC. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def list_locations( + self, + ) -> Callable[[locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse]: + r"""Return a callable for the list locations method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the list locations method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_location" not in self._stubs: + self._stubs["get_location"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + __all__ = ( 'CloudRedisGrpcAsyncIOTransport', diff --git a/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py b/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py index 0908104c60..57b93dcbc8 100755 --- a/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py +++ b/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py @@ -28,6 +28,7 @@ from google.protobuf import json_format from google.api_core import operations_v1 +from google.cloud.location import locations_pb2 # type: ignore from requests import __version__ as requests_version import dataclasses import re @@ -108,6 +109,14 @@ def post_get_instance(self, response): logging.log(f"Received response: {response}") return response + def pre_get_instance_auth_string(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_instance_auth_string(self, response): + logging.log(f"Received response: {response}") + return response + def pre_import_instance(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -124,6 +133,14 @@ def post_list_instances(self, response): logging.log(f"Received response: {response}") return response + def pre_reschedule_maintenance(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_reschedule_maintenance(self, response): + 
logging.log(f"Received response: {response}") + return response + def pre_update_instance(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -220,6 +237,22 @@ def pre_get_instance(self, request: cloud_redis.GetInstanceRequest, metadata: Se def post_get_instance(self, response: cloud_redis.Instance) -> cloud_redis.Instance: """Post-rpc interceptor for get_instance + Override in a subclass to manipulate the response + after it is returned by the CloudRedis server but before + it is returned to user code. + """ + return response + def pre_get_instance_auth_string(self, request: cloud_redis.GetInstanceAuthStringRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloud_redis.GetInstanceAuthStringRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_instance_auth_string + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudRedis server. + """ + return request, metadata + + def post_get_instance_auth_string(self, response: cloud_redis.InstanceAuthString) -> cloud_redis.InstanceAuthString: + """Post-rpc interceptor for get_instance_auth_string + Override in a subclass to manipulate the response after it is returned by the CloudRedis server but before it is returned to user code. @@ -252,6 +285,22 @@ def pre_list_instances(self, request: cloud_redis.ListInstancesRequest, metadata def post_list_instances(self, response: cloud_redis.ListInstancesResponse) -> cloud_redis.ListInstancesResponse: """Post-rpc interceptor for list_instances + Override in a subclass to manipulate the response + after it is returned by the CloudRedis server but before + it is returned to user code. + """ + return response + def pre_reschedule_maintenance(self, request: cloud_redis.RescheduleMaintenanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloud_redis.RescheduleMaintenanceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for reschedule_maintenance + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudRedis server. + """ + return request, metadata + + def post_reschedule_maintenance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for reschedule_maintenance + Override in a subclass to manipulate the response after it is returned by the CloudRedis server but before it is returned to user code. @@ -290,6 +339,127 @@ def post_upgrade_instance(self, response: operations_pb2.Operation) -> operation """ return response + def pre_get_location( + self, request: locations_pb2.GetLocationRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_location + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudRedis server. + """ + return request, metadata + + def post_get_location( + self, response: locations_pb2.Location + ) -> locations_pb2.Location: + """Post-rpc interceptor for get_location + + Override in a subclass to manipulate the response + after it is returned by the CloudRedis server but before + it is returned to user code. 
+ """ + return response + def pre_list_locations( + self, request: locations_pb2.ListLocationsRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_locations + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudRedis server. + """ + return request, metadata + + def post_list_locations( + self, response: locations_pb2.ListLocationsResponse + ) -> locations_pb2.ListLocationsResponse: + """Post-rpc interceptor for list_locations + + Override in a subclass to manipulate the response + after it is returned by the CloudRedis server but before + it is returned to user code. + """ + return response + def pre_cancel_operation( + self, request: operations_pb2.CancelOperationRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudRedis server. + """ + return request, metadata + + def post_cancel_operation( + self, response: None + ) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the CloudRedis server but before + it is returned to user code. + """ + return response + def pre_delete_operation( + self, request: operations_pb2.DeleteOperationRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudRedis server. + """ + return request, metadata + + def post_delete_operation( + self, response: None + ) -> None: + """Post-rpc interceptor for delete_operation + + Override in a subclass to manipulate the response + after it is returned by the CloudRedis server but before + it is returned to user code. + """ + return response + def pre_get_operation( + self, request: operations_pb2.GetOperationRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudRedis server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the CloudRedis server but before + it is returned to user code. + """ + return response + def pre_list_operations( + self, request: operations_pb2.ListOperationsRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudRedis server. + """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the CloudRedis server but before + it is returned to user code. 
+ """ + return response + @dataclasses.dataclass class CloudRedisRestStub: @@ -421,6 +591,30 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: # Only create a new client if we do not already have one. if self._operations_client is None: http_options: Dict[str, List[Dict[str, str]]] = { + 'google.longrunning.Operations.CancelOperation': [ + { + 'method': 'post', + 'uri': '/v1/{name=projects/*/locations/*/operations/*}:cancel', + }, + ], + 'google.longrunning.Operations.DeleteOperation': [ + { + 'method': 'delete', + 'uri': '/v1/{name=projects/*/locations/*/operations/*}', + }, + ], + 'google.longrunning.Operations.GetOperation': [ + { + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*/operations/*}', + }, + ], + 'google.longrunning.Operations.ListOperations': [ + { + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*}/operations', + }, + ], } rest_transport = operations_v1.OperationsRestTransport( @@ -806,7 +1000,7 @@ def __call__(self, Returns: ~.cloud_redis.Instance: - A Google Cloud Redis instance. + A Memorystore for Redis instance. """ http_options: List[Dict[str, str]] = [{ @@ -852,6 +1046,83 @@ def __call__(self, resp = self._interceptor.post_get_instance(resp) return resp + class _GetInstanceAuthString(CloudRedisRestStub): + def __hash__(self): + return hash("GetInstanceAuthString") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: cloud_redis.GetInstanceAuthStringRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> cloud_redis.InstanceAuthString: + r"""Call the get instance auth string method over HTTP. + + Args: + request (~.cloud_redis.GetInstanceAuthStringRequest): + The request object. Request for + [GetInstanceAuthString][google.cloud.redis.v1.CloudRedis.GetInstanceAuthString]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.cloud_redis.InstanceAuthString: + Instance AUTH string details. + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*/instances/*}/authString', + }, + ] + request, metadata = self._interceptor.pre_get_instance_auth_string(request, metadata) + pb_request = cloud_redis.GetInstanceAuthStringRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = cloud_redis.InstanceAuthString() + pb_resp = cloud_redis.InstanceAuthString.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_instance_auth_string(resp) + return resp + class _ImportInstance(CloudRedisRestStub): def __hash__(self): return hash("ImportInstance") @@ -1018,6 +1289,93 @@ def __call__(self, resp = self._interceptor.post_list_instances(resp) return resp + class _RescheduleMaintenance(CloudRedisRestStub): + def __hash__(self): + return hash("RescheduleMaintenance") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: cloud_redis.RescheduleMaintenanceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> operations_pb2.Operation: + r"""Call the reschedule maintenance method over HTTP. + + Args: + request (~.cloud_redis.RescheduleMaintenanceRequest): + The request object. Request for + [RescheduleMaintenance][google.cloud.redis.v1.CloudRedis.RescheduleMaintenance]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{name=projects/*/locations/*/instances/*}:rescheduleMaintenance', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_reschedule_maintenance(request, metadata) + pb_request = cloud_redis.RescheduleMaintenanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
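# (Editor's note, a hedged sketch of what the transcoding step above produces
# for this route; the instance name is illustrative.)
#
#     from google.api_core import path_template
#
#     http_options = [{
#         'method': 'post',
#         'uri': '/v1/{name=projects/*/locations/*/instances/*}:rescheduleMaintenance',
#         'body': '*',
#     }]
#     t = path_template.transcode(
#         http_options, name='projects/p/locations/us-central1/instances/i')
#     # t['method'] == 'post'
#     # t['uri'] == '/v1/projects/p/locations/us-central1/instances/i:rescheduleMaintenance'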
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_reschedule_maintenance(resp) + return resp + class _UpdateInstance(CloudRedisRestStub): def __hash__(self): return hash("UpdateInstance") @@ -1232,6 +1590,14 @@ def get_instance(self) -> Callable[ # In C++ this would require a dynamic_cast return self._GetInstance(self._session, self._host, self._interceptor) # type: ignore + @property + def get_instance_auth_string(self) -> Callable[ + [cloud_redis.GetInstanceAuthStringRequest], + cloud_redis.InstanceAuthString]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetInstanceAuthString(self._session, self._host, self._interceptor) # type: ignore + @property def import_instance(self) -> Callable[ [cloud_redis.ImportInstanceRequest], @@ -1248,6 +1614,14 @@ def list_instances(self) -> Callable[ # In C++ this would require a dynamic_cast return self._ListInstances(self._session, self._host, self._interceptor) # type: ignore + @property + def reschedule_maintenance(self) -> Callable[ + [cloud_redis.RescheduleMaintenanceRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._RescheduleMaintenance(self._session, self._host, self._interceptor) # type: ignore + @property def update_instance(self) -> Callable[ [cloud_redis.UpdateInstanceRequest], @@ -1264,6 +1638,384 @@ def upgrade_instance(self) -> Callable[ # In C++ this would require a dynamic_cast return self._UpgradeInstance(self._session, self._host, self._interceptor) # type: ignore + @property + def get_location(self): + return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore + + class _GetLocation(CloudRedisRestStub): + def __call__(self, + request: locations_pb2.GetLocationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> locations_pb2.Location: + + r"""Call the get location method over HTTP. + + Args: + request (locations_pb2.GetLocationRequest): + The request object for GetLocation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + locations_pb2.Location: Response from GetLocation method. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*}', + }, + ] + + request, metadata = self._interceptor.pre_get_location(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = locations_pb2.Location() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_location(resp) + return resp + + @property + def list_locations(self): + return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore + + class _ListLocations(CloudRedisRestStub): + def __call__(self, + request: locations_pb2.ListLocationsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> locations_pb2.ListLocationsResponse: + + r"""Call the list locations method over HTTP. + + Args: + request (locations_pb2.ListLocationsRequest): + The request object for ListLocations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + locations_pb2.ListLocationsResponse: Response from ListLocations method. + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*}/locations', + }, + ] + + request, metadata = self._interceptor.pre_list_locations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = locations_pb2.ListLocationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_locations(resp) + return resp + + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(CloudRedisRestStub): + def __call__(self, + request: operations_pb2.CancelOperationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> None: + + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{name=projects/*/locations/*/operations/*}:cancel', + }, + ] + + request, metadata = self._interceptor.pre_cancel_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + + @property + def delete_operation(self): + return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore + + class _DeleteOperation(CloudRedisRestStub): + def __call__(self, + request: operations_pb2.DeleteOperationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> None: + + r"""Call the delete operation method over HTTP. + + Args: + request (operations_pb2.DeleteOperationRequest): + The request object for DeleteOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/v1/{name=projects/*/locations/*/operations/*}', + }, + ] + + request, metadata = self._interceptor.pre_delete_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_delete_operation(None) + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(CloudRedisRestStub): + def __call__(self, + request: operations_pb2.GetOperationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> operations_pb2.Operation: + + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.Operation: Response from GetOperation method. + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*/operations/*}', + }, + ] + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.Operation() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_operation(resp) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations(CloudRedisRestStub): + def __call__(self, + request: operations_pb2.ListOperationsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> operations_pb2.ListOperationsResponse: + + r"""Call the list operations method over HTTP. + + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*}/operations', + }, + ] + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
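# (Editor's note, a hedged sketch of the error-translation step shared by all
# of these stubs; the requests.Response below is fabricated for illustration.)
#
#     import requests
#     from google.api_core import exceptions as core_exceptions
#
#     fake = requests.Response()
#     fake.status_code = 404
#     fake._content = b'{"error": {"message": "instance not found"}}'
#     fake.request = requests.Request(
#         method="GET", url="https://redis.googleapis.com/v1/x").prepare()
#     assert isinstance(core_exceptions.from_http_response(fake),
#                       core_exceptions.NotFound)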
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_operations(resp) + return resp + @property def kind(self) -> str: return "rest" diff --git a/tests/integration/goldens/redis/google/cloud/redis_v1/types/__init__.py b/tests/integration/goldens/redis/google/cloud/redis_v1/types/__init__.py index f616df1f68..a0b7fdcdc4 100755 --- a/tests/integration/goldens/redis/google/cloud/redis_v1/types/__init__.py +++ b/tests/integration/goldens/redis/google/cloud/redis_v1/types/__init__.py @@ -20,17 +20,26 @@ FailoverInstanceRequest, GcsDestination, GcsSource, + GetInstanceAuthStringRequest, GetInstanceRequest, ImportInstanceRequest, InputConfig, Instance, + InstanceAuthString, ListInstancesRequest, ListInstancesResponse, LocationMetadata, + MaintenancePolicy, + MaintenanceSchedule, + NodeInfo, OperationMetadata, OutputConfig, + PersistenceConfig, + RescheduleMaintenanceRequest, + TlsCertificate, UpdateInstanceRequest, UpgradeInstanceRequest, + WeeklyMaintenanceWindow, ZoneMetadata, ) @@ -41,16 +50,25 @@ 'FailoverInstanceRequest', 'GcsDestination', 'GcsSource', + 'GetInstanceAuthStringRequest', 'GetInstanceRequest', 'ImportInstanceRequest', 'InputConfig', 'Instance', + 'InstanceAuthString', 'ListInstancesRequest', 'ListInstancesResponse', 'LocationMetadata', + 'MaintenancePolicy', + 'MaintenanceSchedule', + 'NodeInfo', 'OperationMetadata', 'OutputConfig', + 'PersistenceConfig', + 'RescheduleMaintenanceRequest', + 'TlsCertificate', 'UpdateInstanceRequest', 'UpgradeInstanceRequest', + 'WeeklyMaintenanceWindow', 'ZoneMetadata', ) diff --git a/tests/integration/goldens/redis/google/cloud/redis_v1/types/cloud_redis.py b/tests/integration/goldens/redis/google/cloud/redis_v1/types/cloud_redis.py index d0e12a5796..234065b45b 100755 --- a/tests/integration/goldens/redis/google/cloud/redis_v1/types/cloud_redis.py +++ b/tests/integration/goldens/redis/google/cloud/redis_v1/types/cloud_redis.py @@ -19,17 +19,28 @@ import proto # type: ignore +from google.protobuf import duration_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore +from google.type import dayofweek_pb2 # type: ignore +from google.type import timeofday_pb2 # type: ignore __protobuf__ = proto.module( package='google.cloud.redis.v1', manifest={ + 'NodeInfo', 'Instance', + 'PersistenceConfig', + 'RescheduleMaintenanceRequest', + 'MaintenancePolicy', + 'WeeklyMaintenanceWindow', + 'MaintenanceSchedule', 'ListInstancesRequest', 'ListInstancesResponse', 'GetInstanceRequest', + 'GetInstanceAuthStringRequest', + 'InstanceAuthString', 'CreateInstanceRequest', 'UpdateInstanceRequest', 'UpgradeInstanceRequest', @@ -44,12 +55,34 @@ 'OperationMetadata', 'LocationMetadata', 'ZoneMetadata', + 'TlsCertificate', }, ) +class NodeInfo(proto.Message): + r"""Node specific properties. + + Attributes: + id (str): + Output only. Node identifying string. e.g. + 'node-0', 'node-1' + zone (str): + Output only. Location of the node. + """ + + id: str = proto.Field( + proto.STRING, + number=1, + ) + zone: str = proto.Field( + proto.STRING, + number=2, + ) + + class Instance(proto.Message): - r"""A Google Cloud Redis instance. + r"""A Memorystore for Redis instance. 
Attributes: name (str): @@ -73,19 +106,21 @@ class Instance(proto.Message): Resource labels to represent user provided metadata location_id (str): - Optional. The zone where the instance will be provisioned. - If not provided, the service will choose a zone for the - instance. For STANDARD_HA tier, instances will be created - across two zones for protection against zonal failures. If - [alternative_location_id][google.cloud.redis.v1.Instance.alternative_location_id] - is also provided, it must be different from - [location_id][google.cloud.redis.v1.Instance.location_id]. + Optional. The zone where the instance will be + provisioned. If not provided, the service will + choose a zone from the specified region for the + instance. For standard tier, additional nodes + will be added across multiple zones for + protection against zonal failures. If specified, + at least one node will be provisioned in this + zone. alternative_location_id (str): - Optional. Only applicable to STANDARD_HA tier which protects - the instance against zonal failures by provisioning it - across two zones. If provided, it must be a different zone - from the one provided in - [location_id][google.cloud.redis.v1.Instance.location_id]. + Optional. If specified, at least one node will be + provisioned in this zone in addition to the zone specified + in location_id. Only applicable to standard tier. If + provided, it must be a different zone from the one provided + in [location_id]. Additional nodes beyond the first 2 will + be placed in zones selected by the service. redis_version (str): Optional. The version of Redis software. If not provided, latest supported version will be used. Currently, the @@ -94,14 +129,24 @@ class Instance(proto.Message): - ``REDIS_3_2`` for Redis 3.2 compatibility - ``REDIS_4_0`` for Redis 4.0 compatibility (default) - ``REDIS_5_0`` for Redis 5.0 compatibility + - ``REDIS_6_X`` for Redis 6.x compatibility reserved_ip_range (str): - Optional. The CIDR range of internal - addresses that are reserved for this instance. - If not provided, the service will choose an - unused /29 block, for example, 10.0.0.0/29 or - 192.168.0.0/29. Ranges must be unique and - non-overlapping with existing subnets in an - authorized network. + Optional. For DIRECT_PEERING mode, the CIDR range of + internal addresses that are reserved for this instance. + Range must be unique and non-overlapping with existing + subnets in an authorized network. For PRIVATE_SERVICE_ACCESS + mode, the name of one allocated IP address ranges associated + with this private service access connection. If not + provided, the service will choose an unused /29 block, for + example, 10.0.0.0/29 or 192.168.0.0/29. For + READ_REPLICAS_ENABLED the default block size is /28. + secondary_ip_range (str): + Optional. Additional IP range for node placement. Required + when enabling read replicas on an existing instance. For + DIRECT_PEERING mode value must be a CIDR range of size /28, + or "auto". For PRIVATE_SERVICE_ACCESS mode value must be the + name of an allocated address range associated with the + private service access connection, or "auto". host (str): Output only. Hostname or IP address of the exposed Redis endpoint used by clients to @@ -110,15 +155,10 @@ class Instance(proto.Message): Output only. The port number of the exposed Redis endpoint. current_location_id (str): - Output only. The current zone where the Redis endpoint is - placed. 
For Basic Tier instances, this will always be the - same as the - [location_id][google.cloud.redis.v1.Instance.location_id] - provided by the user at creation time. For Standard Tier - instances, this can be either - [location_id][google.cloud.redis.v1.Instance.location_id] or - [alternative_location_id][google.cloud.redis.v1.Instance.alternative_location_id] - and can change after a failover event. + Output only. The current zone where the Redis primary node + is located. In basic tier, this will always be the same as + [location_id]. In standard tier, this can be the zone of any + node in the instance. create_time (google.protobuf.timestamp_pb2.Timestamp): Output only. The time the instance was created. @@ -167,6 +207,64 @@ class Instance(proto.Message): connect_mode (google.cloud.redis_v1.types.Instance.ConnectMode): Optional. The network connect mode of the Redis instance. If not provided, the connect mode defaults to DIRECT_PEERING. + auth_enabled (bool): + Optional. Indicates whether OSS Redis AUTH is + enabled for the instance. If set to "true" AUTH + is enabled on the instance. Default value is + "false" meaning AUTH is disabled. + server_ca_certs (MutableSequence[google.cloud.redis_v1.types.TlsCertificate]): + Output only. List of server CA certificates + for the instance. + transit_encryption_mode (google.cloud.redis_v1.types.Instance.TransitEncryptionMode): + Optional. The TLS mode of the Redis instance. + If not provided, TLS is disabled for the + instance. + maintenance_policy (google.cloud.redis_v1.types.MaintenancePolicy): + Optional. The maintenance policy for the + instance. If not provided, maintenance events + can be performed at any time. + maintenance_schedule (google.cloud.redis_v1.types.MaintenanceSchedule): + Output only. Date and time of upcoming + maintenance events which have been scheduled. + replica_count (int): + Optional. The number of replica nodes. The valid range for + the Standard Tier with read replicas enabled is [1-5] and + defaults to 2. If read replicas are not enabled for a + Standard Tier instance, the only valid value is 1 and the + default is 1. The valid value for basic tier is 0 and the + default is also 0. + nodes (MutableSequence[google.cloud.redis_v1.types.NodeInfo]): + Output only. Info per node. + read_endpoint (str): + Output only. Hostname or IP address of the + exposed readonly Redis endpoint. Standard tier + only. Targets all healthy replica nodes in + instance. Replication is asynchronous and + replica nodes will exhibit some lag behind the + primary. Write requests must target 'host'. + read_endpoint_port (int): + Output only. The port number of the exposed + readonly Redis endpoint. Standard tier only. + Write requests should target 'port'. + read_replicas_mode (google.cloud.redis_v1.types.Instance.ReadReplicasMode): + Optional. Read replicas mode for the instance. Defaults to + READ_REPLICAS_DISABLED. + customer_managed_key (str): + Optional. The KMS key reference that the + customer provides when trying to create the + instance. + persistence_config (google.cloud.redis_v1.types.PersistenceConfig): + Optional. Persistence configuration + parameters. + suspension_reasons (MutableSequence[google.cloud.redis_v1.types.Instance.SuspensionReason]): + Optional. Reasons that cause the instance to be + in the "SUSPENDED" state. + maintenance_version (str): + Optional. The self service update maintenance version. The + version is date based such as "20210712_00_00". + available_maintenance_versions (MutableSequence[str]): + Optional.
The available maintenance versions + that an instance could update to. """ class State(proto.Enum): r"""Represents the different states of a Redis instance. @@ -243,6 +341,56 @@ class ConnectMode(proto.Enum): DIRECT_PEERING = 1 PRIVATE_SERVICE_ACCESS = 2 + class TransitEncryptionMode(proto.Enum): + r"""Available TLS modes. + + Values: + TRANSIT_ENCRYPTION_MODE_UNSPECIFIED (0): + Not set. + SERVER_AUTHENTICATION (1): + Client to Server traffic encryption enabled + with server authentication. + DISABLED (2): + TLS is disabled for the instance. + """ + TRANSIT_ENCRYPTION_MODE_UNSPECIFIED = 0 + SERVER_AUTHENTICATION = 1 + DISABLED = 2 + + class ReadReplicasMode(proto.Enum): + r"""Read replicas mode. + + Values: + READ_REPLICAS_MODE_UNSPECIFIED (0): + If not set, Memorystore Redis backend will default to + READ_REPLICAS_DISABLED. + READ_REPLICAS_DISABLED (1): + If disabled, read endpoint will not be + provided and the instance cannot scale up or + down the number of replicas. + READ_REPLICAS_ENABLED (2): + If enabled, read endpoint will be provided + and the instance can scale up and down the + number of replicas. Not valid for basic tier. + """ + READ_REPLICAS_MODE_UNSPECIFIED = 0 + READ_REPLICAS_DISABLED = 1 + READ_REPLICAS_ENABLED = 2 + + class SuspensionReason(proto.Enum): + r"""Possible reasons for the instance to be in a "SUSPENDED" + state. + + Values: + SUSPENSION_REASON_UNSPECIFIED (0): + Not set. + CUSTOMER_MANAGED_KEY_ISSUE (1): + Something wrong with the CMEK key provided by + customer. + """ + SUSPENSION_REASON_UNSPECIFIED = 0 + CUSTOMER_MANAGED_KEY_ISSUE = 1 + name: str = proto.Field( proto.STRING, number=1, @@ -272,6 +420,10 @@ class ConnectMode(proto.Enum): proto.STRING, number=9, ) + secondary_ip_range: str = proto.Field( + proto.STRING, + number=30, + ) host: str = proto.Field( proto.STRING, number=10, @@ -325,6 +477,328 @@ class ConnectMode(proto.Enum): number=22, enum=ConnectMode, ) + auth_enabled: bool = proto.Field( + proto.BOOL, + number=23, + ) + server_ca_certs: MutableSequence['TlsCertificate'] = proto.RepeatedField( + proto.MESSAGE, + number=25, + message='TlsCertificate', + ) + transit_encryption_mode: TransitEncryptionMode = proto.Field( + proto.ENUM, + number=26, + enum=TransitEncryptionMode, + ) + maintenance_policy: 'MaintenancePolicy' = proto.Field( + proto.MESSAGE, + number=27, + message='MaintenancePolicy', + ) + maintenance_schedule: 'MaintenanceSchedule' = proto.Field( + proto.MESSAGE, + number=28, + message='MaintenanceSchedule', + ) + replica_count: int = proto.Field( + proto.INT32, + number=31, + ) + nodes: MutableSequence['NodeInfo'] = proto.RepeatedField( + proto.MESSAGE, + number=32, + message='NodeInfo', + ) + read_endpoint: str = proto.Field( + proto.STRING, + number=33, + ) + read_endpoint_port: int = proto.Field( + proto.INT32, + number=34, + ) + read_replicas_mode: ReadReplicasMode = proto.Field( + proto.ENUM, + number=35, + enum=ReadReplicasMode, + ) + customer_managed_key: str = proto.Field( + proto.STRING, + number=36, + ) + persistence_config: 'PersistenceConfig' = proto.Field( + proto.MESSAGE, + number=37, + message='PersistenceConfig', + ) + suspension_reasons: MutableSequence[SuspensionReason] = proto.RepeatedField( + proto.ENUM, + number=38, + enum=SuspensionReason, + ) + maintenance_version: str = proto.Field( + proto.STRING, + number=39, + ) + available_maintenance_versions: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=40, + ) + + +class PersistenceConfig(proto.Message): + r"""Configuration of the persistence 
functionality. + + Attributes: + persistence_mode (google.cloud.redis_v1.types.PersistenceConfig.PersistenceMode): + Optional. Controls whether Persistence + features are enabled. If not provided, the + existing value will be used. + rdb_snapshot_period (google.cloud.redis_v1.types.PersistenceConfig.SnapshotPeriod): + Optional. Period between RDB snapshots. Snapshots will be + attempted every period starting from the provided snapshot + start time. For example, a start time of 01/01/2033 06:45 + and SIX_HOURS snapshot period will do nothing until + 01/01/2033, and then trigger snapshots every day at 06:45, + 12:45, 18:45, and 00:45 the next day, and so on. If not + provided, TWENTY_FOUR_HOURS will be used as default. + rdb_next_snapshot_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The next time that a snapshot + attempt is scheduled to occur. + rdb_snapshot_start_time (google.protobuf.timestamp_pb2.Timestamp): + Optional. Date and time that the first + snapshot was/will be attempted, and to which + future snapshots will be aligned. If not + provided, the current time will be used. + """ + class PersistenceMode(proto.Enum): + r"""Available Persistence modes. + + Values: + PERSISTENCE_MODE_UNSPECIFIED (0): + Not set. + DISABLED (1): + Persistence is disabled for the instance, + and any existing snapshots are deleted. + RDB (2): + RDB based Persistence is enabled. + """ + PERSISTENCE_MODE_UNSPECIFIED = 0 + DISABLED = 1 + RDB = 2 + + class SnapshotPeriod(proto.Enum): + r"""Available snapshot periods for scheduling. + + Values: + SNAPSHOT_PERIOD_UNSPECIFIED (0): + Not set. + ONE_HOUR (3): + Snapshot every 1 hour. + SIX_HOURS (4): + Snapshot every 6 hours. + TWELVE_HOURS (5): + Snapshot every 12 hours. + TWENTY_FOUR_HOURS (6): + Snapshot every 24 hours. + """ + SNAPSHOT_PERIOD_UNSPECIFIED = 0 + ONE_HOUR = 3 + SIX_HOURS = 4 + TWELVE_HOURS = 5 + TWENTY_FOUR_HOURS = 6 + + persistence_mode: PersistenceMode = proto.Field( + proto.ENUM, + number=1, + enum=PersistenceMode, + ) + rdb_snapshot_period: SnapshotPeriod = proto.Field( + proto.ENUM, + number=2, + enum=SnapshotPeriod, + ) + rdb_next_snapshot_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + rdb_snapshot_start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + + +class RescheduleMaintenanceRequest(proto.Message): + r"""Request for + [RescheduleMaintenance][google.cloud.redis.v1.CloudRedis.RescheduleMaintenance]. + + Attributes: + name (str): + Required. Redis instance resource name using the form: + ``projects/{project_id}/locations/{location_id}/instances/{instance_id}`` + where ``location_id`` refers to a GCP region. + reschedule_type (google.cloud.redis_v1.types.RescheduleMaintenanceRequest.RescheduleType): + Required. If reschedule type is SPECIFIC_TIME, must set up + schedule_time as well. + schedule_time (google.protobuf.timestamp_pb2.Timestamp): + Optional. Timestamp when the maintenance shall be + rescheduled to if reschedule_type=SPECIFIC_TIME, in RFC 3339 + format, for example ``2012-11-15T16:19:00.094Z``. + """ + class RescheduleType(proto.Enum): + r"""Reschedule options. + + Values: + RESCHEDULE_TYPE_UNSPECIFIED (0): + Not set. + IMMEDIATE (1): + If the user wants to schedule the maintenance + to happen now. + NEXT_AVAILABLE_WINDOW (2): + If the user wants to use the existing + maintenance policy to find the next available + window. 
+ SPECIFIC_TIME (3): + If the user wants to reschedule the + maintenance to a specific time. + """ + RESCHEDULE_TYPE_UNSPECIFIED = 0 + IMMEDIATE = 1 + NEXT_AVAILABLE_WINDOW = 2 + SPECIFIC_TIME = 3 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + reschedule_type: RescheduleType = proto.Field( + proto.ENUM, + number=2, + enum=RescheduleType, + ) + schedule_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + + +class MaintenancePolicy(proto.Message): + r"""Maintenance policy for an instance. + + Attributes: + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the policy was + created. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the policy was + last updated. + description (str): + Optional. Description of what this policy is for. + Create/Update methods return INVALID_ARGUMENT if the length + is greater than 512. + weekly_maintenance_window (MutableSequence[google.cloud.redis_v1.types.WeeklyMaintenanceWindow]): + Optional. Maintenance window that is applied to resources + covered by this policy. Minimum 1. For the current version, + the maximum number of weekly_window is expected to be one. + """ + + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + description: str = proto.Field( + proto.STRING, + number=3, + ) + weekly_maintenance_window: MutableSequence['WeeklyMaintenanceWindow'] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message='WeeklyMaintenanceWindow', + ) + + +class WeeklyMaintenanceWindow(proto.Message): + r"""Time window in which disruptive maintenance updates occur. + Non-disruptive updates can occur inside or outside this window. + + Attributes: + day (google.type.dayofweek_pb2.DayOfWeek): + Required. The day of week that maintenance + updates occur. + start_time (google.type.timeofday_pb2.TimeOfDay): + Required. Start time of the window in UTC + time. + duration (google.protobuf.duration_pb2.Duration): + Output only. Duration of the maintenance + window. The current window is fixed at 1 hour. + """ + + day: dayofweek_pb2.DayOfWeek = proto.Field( + proto.ENUM, + number=1, + enum=dayofweek_pb2.DayOfWeek, + ) + start_time: timeofday_pb2.TimeOfDay = proto.Field( + proto.MESSAGE, + number=2, + message=timeofday_pb2.TimeOfDay, + ) + duration: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=3, + message=duration_pb2.Duration, + ) + + +class MaintenanceSchedule(proto.Message): + r"""Upcoming maintenance schedule. If no maintenance is + scheduled, fields are not populated. + + Attributes: + start_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The start time of any upcoming + scheduled maintenance for this instance. + end_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The end time of any upcoming + scheduled maintenance for this instance. + can_reschedule (bool): + If the scheduled maintenance can be + rescheduled, default is true. + schedule_deadline_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The deadline that the + maintenance schedule start time can not go + beyond, including reschedule. 
+ """ + + start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + can_reschedule: bool = proto.Field( + proto.BOOL, + number=3, + ) + schedule_deadline_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) class ListInstancesRequest(proto.Message): @@ -377,9 +851,9 @@ class ListInstancesResponse(proto.Message): If the ``location_id`` in the parent field of the request is "-", all regions available to the project are queried, and the results aggregated. If in such an aggregated query a - location is unavailable, a dummy Redis entry is included in - the response with the ``name`` field set to a value of the - form + location is unavailable, a placeholder Redis entry is + included in the response with the ``name`` field set to a + value of the form ``projects/{project_id}/locations/{location_id}/instances/``- and the ``status`` field set to ERROR and ``status_message`` field set to "location not available for ListInstances". @@ -427,6 +901,37 @@ class GetInstanceRequest(proto.Message): ) +class GetInstanceAuthStringRequest(proto.Message): + r"""Request for + [GetInstanceAuthString][google.cloud.redis.v1.CloudRedis.GetInstanceAuthString]. + + Attributes: + name (str): + Required. Redis instance resource name using the form: + ``projects/{project_id}/locations/{location_id}/instances/{instance_id}`` + where ``location_id`` refers to a GCP region. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class InstanceAuthString(proto.Message): + r"""Instance AUTH string details. + + Attributes: + auth_string (str): + AUTH string set on the instance. + """ + + auth_string: str = proto.Field( + proto.STRING, + number=1, + ) + + class CreateInstanceRequest(proto.Message): r"""Request for [CreateInstance][google.cloud.redis.v1.CloudRedis.CreateInstance]. @@ -480,6 +985,7 @@ class UpdateInstanceRequest(proto.Message): - ``labels`` - ``memorySizeGb`` - ``redisConfig`` + - ``replica_count`` instance (google.cloud.redis_v1.types.Instance): Required. Update description. Only fields specified in update_mask are updated. @@ -685,7 +1191,7 @@ class DataProtectionMode(proto.Enum): Instance failover will be protected with data loss control. More specifically, the failover will only be performed if the current - replication offset diff between master and + replication offset diff between primary and replica is under a certain threshold. FORCE_DATA_LOSS (2): Instance failover will be performed without @@ -789,4 +1295,49 @@ class ZoneMetadata(proto.Message): """ +class TlsCertificate(proto.Message): + r"""TlsCertificate Resource + + Attributes: + serial_number (str): + Serial number, as extracted from the + certificate. + cert (str): + PEM representation. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the certificate was created in + `RFC 3339 `__ format, + for example ``2020-05-18T00:00:00.094Z``. + expire_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the certificate expires in `RFC + 3339 `__ format, for + example ``2020-05-18T00:00:00.094Z``. + sha1_fingerprint (str): + Sha1 Fingerprint of the certificate. 
+ """ + + serial_number: str = proto.Field( + proto.STRING, + number=1, + ) + cert: str = proto.Field( + proto.STRING, + number=2, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + expire_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + sha1_fingerprint: str = proto.Field( + proto.STRING, + number=5, + ) + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/tests/integration/goldens/redis/noxfile.py b/tests/integration/goldens/redis/noxfile.py index 5308e1ac4b..0b02ca125e 100755 --- a/tests/integration/goldens/redis/noxfile.py +++ b/tests/integration/goldens/redis/noxfile.py @@ -134,7 +134,7 @@ def docs(session): """Build the docs for this library.""" session.install("-e", ".") - session.install("sphinx==4.0.1", "alabaster", "recommonmark") + session.install("sphinx==7.0.1", "alabaster", "recommonmark") shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) session.run( diff --git a/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_get_instance_auth_string_async.py b/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_get_instance_auth_string_async.py new file mode 100755 index 0000000000..a8e1b9147d --- /dev/null +++ b/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_get_instance_auth_string_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetInstanceAuthString +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-redis + + +# [START redis_v1_generated_CloudRedis_GetInstanceAuthString_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import redis_v1 + + +async def sample_get_instance_auth_string(): + # Create a client + client = redis_v1.CloudRedisAsyncClient() + + # Initialize request argument(s) + request = redis_v1.GetInstanceAuthStringRequest( + name="name_value", + ) + + # Make the request + response = await client.get_instance_auth_string(request=request) + + # Handle the response + print(response) + +# [END redis_v1_generated_CloudRedis_GetInstanceAuthString_async] diff --git a/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_get_instance_auth_string_sync.py b/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_get_instance_auth_string_sync.py new file mode 100755 index 0000000000..a872f6a64a --- /dev/null +++ b/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_get_instance_auth_string_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetInstanceAuthString +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-redis + + +# [START redis_v1_generated_CloudRedis_GetInstanceAuthString_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
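# - (Editor's sketch, not generated text) It requires credentials; locally,
#   Application Default Credentials are the usual route, e.g.:
#       gcloud auth application-default login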
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import redis_v1 + + +def sample_get_instance_auth_string(): + # Create a client + client = redis_v1.CloudRedisClient() + + # Initialize request argument(s) + request = redis_v1.GetInstanceAuthStringRequest( + name="name_value", + ) + + # Make the request + response = client.get_instance_auth_string(request=request) + + # Handle the response + print(response) + +# [END redis_v1_generated_CloudRedis_GetInstanceAuthString_sync] diff --git a/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_reschedule_maintenance_async.py b/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_reschedule_maintenance_async.py new file mode 100755 index 0000000000..564a1c0277 --- /dev/null +++ b/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_reschedule_maintenance_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RescheduleMaintenance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-redis + + +# [START redis_v1_generated_CloudRedis_RescheduleMaintenance_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
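# - (Editor's sketch, not generated text) With SPECIFIC_TIME the request
#   normally also carries schedule_time, which the generated request accepts
#   as a protobuf Timestamp, e.g.:
#       from google.protobuf import timestamp_pb2
#       ts = timestamp_pb2.Timestamp()
#       ts.GetCurrentTime()
#       request = redis_v1.RescheduleMaintenanceRequest(
#           name="name_value", reschedule_type="SPECIFIC_TIME", schedule_time=ts)
#   (field names follow the RescheduleMaintenanceRequest message in this PR)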
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import redis_v1 + + +async def sample_reschedule_maintenance(): + # Create a client + client = redis_v1.CloudRedisAsyncClient() + + # Initialize request argument(s) + request = redis_v1.RescheduleMaintenanceRequest( + name="name_value", + reschedule_type="SPECIFIC_TIME", + ) + + # Make the request + operation = client.reschedule_maintenance(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END redis_v1_generated_CloudRedis_RescheduleMaintenance_async] diff --git a/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_reschedule_maintenance_sync.py b/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_reschedule_maintenance_sync.py new file mode 100755 index 0000000000..825fc37aa0 --- /dev/null +++ b/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_reschedule_maintenance_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RescheduleMaintenance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-redis + + +# [START redis_v1_generated_CloudRedis_RescheduleMaintenance_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
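# - (Editor's sketch, not generated text) operation.result() blocks until the
#   long-running operation finishes; a bounded wait is available via the
#   standard google.api_core.operation.Operation signature, e.g.:
#       response = operation.result(timeout=300)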
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import redis_v1 + + +def sample_reschedule_maintenance(): + # Create a client + client = redis_v1.CloudRedisClient() + + # Initialize request argument(s) + request = redis_v1.RescheduleMaintenanceRequest( + name="name_value", + reschedule_type="SPECIFIC_TIME", + ) + + # Make the request + operation = client.reschedule_maintenance(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END redis_v1_generated_CloudRedis_RescheduleMaintenance_sync] diff --git a/tests/integration/goldens/redis/samples/generated_samples/snippet_metadata_google.cloud.redis.v1.json b/tests/integration/goldens/redis/samples/generated_samples/snippet_metadata_google.cloud.redis.v1.json index 18c8da345d..174d9dff49 100755 --- a/tests/integration/goldens/redis/samples/generated_samples/snippet_metadata_google.cloud.redis.v1.json +++ b/tests/integration/goldens/redis/samples/generated_samples/snippet_metadata_google.cloud.redis.v1.json @@ -687,6 +687,167 @@ ], "title": "redis_v1_generated_cloud_redis_failover_instance_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.redis_v1.CloudRedisAsyncClient", + "shortName": "CloudRedisAsyncClient" + }, + "fullName": "google.cloud.redis_v1.CloudRedisAsyncClient.get_instance_auth_string", + "method": { + "fullName": "google.cloud.redis.v1.CloudRedis.GetInstanceAuthString", + "service": { + "fullName": "google.cloud.redis.v1.CloudRedis", + "shortName": "CloudRedis" + }, + "shortName": "GetInstanceAuthString" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.redis_v1.types.GetInstanceAuthStringRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]]" + } + ], + "resultType": "google.cloud.redis_v1.types.InstanceAuthString", + "shortName": "get_instance_auth_string" + }, + "description": "Sample for GetInstanceAuthString", + "file": "redis_v1_generated_cloud_redis_get_instance_auth_string_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "redis_v1_generated_CloudRedis_GetInstanceAuthString_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "redis_v1_generated_cloud_redis_get_instance_auth_string_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.redis_v1.CloudRedisClient", + "shortName": "CloudRedisClient" + }, + "fullName": "google.cloud.redis_v1.CloudRedisClient.get_instance_auth_string", + "method": { + "fullName": "google.cloud.redis.v1.CloudRedis.GetInstanceAuthString", + "service": { + "fullName": "google.cloud.redis.v1.CloudRedis", + "shortName": "CloudRedis" + }, + "shortName": "GetInstanceAuthString" + }, + "parameters": [ + { + "name": "request", + "type":
"google.cloud.redis_v1.types.GetInstanceAuthStringRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.redis_v1.types.InstanceAuthString", + "shortName": "get_instance_auth_string" + }, + "description": "Sample for GetInstanceAuthString", + "file": "redis_v1_generated_cloud_redis_get_instance_auth_string_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "redis_v1_generated_CloudRedis_GetInstanceAuthString_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "redis_v1_generated_cloud_redis_get_instance_auth_string_sync.py" + }, { "canonical": true, "clientMethod": { @@ -1178,6 +1339,183 @@ ], "title": "redis_v1_generated_cloud_redis_list_instances_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.redis_v1.CloudRedisAsyncClient", + "shortName": "CloudRedisAsyncClient" + }, + "fullName": "google.cloud.redis_v1.CloudRedisAsyncClient.reschedule_maintenance", + "method": { + "fullName": "google.cloud.redis.v1.CloudRedis.RescheduleMaintenance", + "service": { + "fullName": "google.cloud.redis.v1.CloudRedis", + "shortName": "CloudRedis" + }, + "shortName": "RescheduleMaintenance" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.redis_v1.types.RescheduleMaintenanceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "reschedule_type", + "type": "google.cloud.redis_v1.types.RescheduleMaintenanceRequest.RescheduleType" + }, + { + "name": "schedule_time", + "type": "google.protobuf.timestamp_pb2.Timestamp" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "reschedule_maintenance" + }, + "description": "Sample for RescheduleMaintenance", + "file": "redis_v1_generated_cloud_redis_reschedule_maintenance_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "redis_v1_generated_CloudRedis_RescheduleMaintenance_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "redis_v1_generated_cloud_redis_reschedule_maintenance_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.redis_v1.CloudRedisClient", + "shortName": "CloudRedisClient" + }, + "fullName": "google.cloud.redis_v1.CloudRedisClient.reschedule_maintenance", + "method": { + "fullName": "google.cloud.redis.v1.CloudRedis.RescheduleMaintenance", + "service": { + "fullName": 
"google.cloud.redis.v1.CloudRedis", + "shortName": "CloudRedis" + }, + "shortName": "RescheduleMaintenance" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.redis_v1.types.RescheduleMaintenanceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "reschedule_type", + "type": "google.cloud.redis_v1.types.RescheduleMaintenanceRequest.RescheduleType" + }, + { + "name": "schedule_time", + "type": "google.protobuf.timestamp_pb2.Timestamp" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "reschedule_maintenance" + }, + "description": "Sample for RescheduleMaintenance", + "file": "redis_v1_generated_cloud_redis_reschedule_maintenance_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "redis_v1_generated_CloudRedis_RescheduleMaintenance_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "redis_v1_generated_cloud_redis_reschedule_maintenance_sync.py" + }, { "canonical": true, "clientMethod": { diff --git a/tests/integration/goldens/redis/scripts/fixup_redis_v1_keywords.py b/tests/integration/goldens/redis/scripts/fixup_redis_v1_keywords.py index b83d29e1c4..4304e380e4 100755 --- a/tests/integration/goldens/redis/scripts/fixup_redis_v1_keywords.py +++ b/tests/integration/goldens/redis/scripts/fixup_redis_v1_keywords.py @@ -44,8 +44,10 @@ class redisCallTransformer(cst.CSTTransformer): 'export_instance': ('name', 'output_config', ), 'failover_instance': ('name', 'data_protection_mode', ), 'get_instance': ('name', ), + 'get_instance_auth_string': ('name', ), 'import_instance': ('name', 'input_config', ), 'list_instances': ('parent', 'page_size', 'page_token', ), + 'reschedule_maintenance': ('name', 'reschedule_type', 'schedule_time', ), 'update_instance': ('update_mask', 'instance', ), 'upgrade_instance': ('name', 'redis_version', ), } diff --git a/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py b/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py index ca2466444c..bc58a6c8ea 100755 --- a/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py +++ b/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py @@ -47,16 +47,20 @@ from google.api_core import path_template from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError +from google.cloud.location import locations_pb2 from google.cloud.redis_v1.services.cloud_redis import CloudRedisAsyncClient from google.cloud.redis_v1.services.cloud_redis import CloudRedisClient from google.cloud.redis_v1.services.cloud_redis import pagers from google.cloud.redis_v1.services.cloud_redis import transports from google.cloud.redis_v1.types import cloud_redis -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account +from google.protobuf import duration_pb2 # type: ignore from 
google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore +from google.type import dayofweek_pb2 # type: ignore +from google.type import timeofday_pb2 # type: ignore import google.auth @@ -1001,6 +1005,7 @@ def test_get_instance(request_type, transport: str = 'grpc'): alternative_location_id='alternative_location_id_value', redis_version='redis_version_value', reserved_ip_range='reserved_ip_range_value', + secondary_ip_range='secondary_ip_range_value', host='host_value', port=453, current_location_id='current_location_id_value', @@ -1011,6 +1016,16 @@ def test_get_instance(request_type, transport: str = 'grpc'): authorized_network='authorized_network_value', persistence_iam_identity='persistence_iam_identity_value', connect_mode=cloud_redis.Instance.ConnectMode.DIRECT_PEERING, + auth_enabled=True, + transit_encryption_mode=cloud_redis.Instance.TransitEncryptionMode.SERVER_AUTHENTICATION, + replica_count=1384, + read_endpoint='read_endpoint_value', + read_endpoint_port=1920, + read_replicas_mode=cloud_redis.Instance.ReadReplicasMode.READ_REPLICAS_DISABLED, + customer_managed_key='customer_managed_key_value', + suspension_reasons=[cloud_redis.Instance.SuspensionReason.CUSTOMER_MANAGED_KEY_ISSUE], + maintenance_version='maintenance_version_value', + available_maintenance_versions=['available_maintenance_versions_value'], ) response = client.get_instance(request) @@ -1027,6 +1042,7 @@ def test_get_instance(request_type, transport: str = 'grpc'): assert response.alternative_location_id == 'alternative_location_id_value' assert response.redis_version == 'redis_version_value' assert response.reserved_ip_range == 'reserved_ip_range_value' + assert response.secondary_ip_range == 'secondary_ip_range_value' assert response.host == 'host_value' assert response.port == 453 assert response.current_location_id == 'current_location_id_value' @@ -1037,6 +1053,16 @@ def test_get_instance(request_type, transport: str = 'grpc'): assert response.authorized_network == 'authorized_network_value' assert response.persistence_iam_identity == 'persistence_iam_identity_value' assert response.connect_mode == cloud_redis.Instance.ConnectMode.DIRECT_PEERING + assert response.auth_enabled is True + assert response.transit_encryption_mode == cloud_redis.Instance.TransitEncryptionMode.SERVER_AUTHENTICATION + assert response.replica_count == 1384 + assert response.read_endpoint == 'read_endpoint_value' + assert response.read_endpoint_port == 1920 + assert response.read_replicas_mode == cloud_redis.Instance.ReadReplicasMode.READ_REPLICAS_DISABLED + assert response.customer_managed_key == 'customer_managed_key_value' + assert response.suspension_reasons == [cloud_redis.Instance.SuspensionReason.CUSTOMER_MANAGED_KEY_ISSUE] + assert response.maintenance_version == 'maintenance_version_value' + assert response.available_maintenance_versions == ['available_maintenance_versions_value'] def test_get_instance_empty_call(): @@ -1079,6 +1105,7 @@ async def test_get_instance_async(transport: str = 'grpc_asyncio', request_type= alternative_location_id='alternative_location_id_value', redis_version='redis_version_value', reserved_ip_range='reserved_ip_range_value', + secondary_ip_range='secondary_ip_range_value', host='host_value', port=453, current_location_id='current_location_id_value', @@ -1089,6 +1116,16 @@ async def test_get_instance_async(transport: str = 'grpc_asyncio', request_type= 
authorized_network='authorized_network_value', persistence_iam_identity='persistence_iam_identity_value', connect_mode=cloud_redis.Instance.ConnectMode.DIRECT_PEERING, + auth_enabled=True, + transit_encryption_mode=cloud_redis.Instance.TransitEncryptionMode.SERVER_AUTHENTICATION, + replica_count=1384, + read_endpoint='read_endpoint_value', + read_endpoint_port=1920, + read_replicas_mode=cloud_redis.Instance.ReadReplicasMode.READ_REPLICAS_DISABLED, + customer_managed_key='customer_managed_key_value', + suspension_reasons=[cloud_redis.Instance.SuspensionReason.CUSTOMER_MANAGED_KEY_ISSUE], + maintenance_version='maintenance_version_value', + available_maintenance_versions=['available_maintenance_versions_value'], )) response = await client.get_instance(request) @@ -1105,6 +1142,7 @@ async def test_get_instance_async(transport: str = 'grpc_asyncio', request_type= assert response.alternative_location_id == 'alternative_location_id_value' assert response.redis_version == 'redis_version_value' assert response.reserved_ip_range == 'reserved_ip_range_value' + assert response.secondary_ip_range == 'secondary_ip_range_value' assert response.host == 'host_value' assert response.port == 453 assert response.current_location_id == 'current_location_id_value' @@ -1115,6 +1153,16 @@ async def test_get_instance_async(transport: str = 'grpc_asyncio', request_type= assert response.authorized_network == 'authorized_network_value' assert response.persistence_iam_identity == 'persistence_iam_identity_value' assert response.connect_mode == cloud_redis.Instance.ConnectMode.DIRECT_PEERING + assert response.auth_enabled is True + assert response.transit_encryption_mode == cloud_redis.Instance.TransitEncryptionMode.SERVER_AUTHENTICATION + assert response.replica_count == 1384 + assert response.read_endpoint == 'read_endpoint_value' + assert response.read_endpoint_port == 1920 + assert response.read_replicas_mode == cloud_redis.Instance.ReadReplicasMode.READ_REPLICAS_DISABLED + assert response.customer_managed_key == 'customer_managed_key_value' + assert response.suspension_reasons == [cloud_redis.Instance.SuspensionReason.CUSTOMER_MANAGED_KEY_ISSUE] + assert response.maintenance_version == 'maintenance_version_value' + assert response.available_maintenance_versions == ['available_maintenance_versions_value'] @pytest.mark.asyncio @@ -1267,6 +1315,238 @@ async def test_get_instance_flattened_error_async(): ) +@pytest.mark.parametrize("request_type", [ + cloud_redis.GetInstanceAuthStringRequest, + dict, +]) +def test_get_instance_auth_string(request_type, transport: str = 'grpc'): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_instance_auth_string), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = cloud_redis.InstanceAuthString( + auth_string='auth_string_value', + ) + response = client.get_instance_auth_string(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_redis.GetInstanceAuthStringRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, cloud_redis.InstanceAuthString) + assert response.auth_string == 'auth_string_value' + + +def test_get_instance_auth_string_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_instance_auth_string), + '__call__') as call: + client.get_instance_auth_string() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_redis.GetInstanceAuthStringRequest() + +@pytest.mark.asyncio +async def test_get_instance_auth_string_async(transport: str = 'grpc_asyncio', request_type=cloud_redis.GetInstanceAuthStringRequest): + client = CloudRedisAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_instance_auth_string), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloud_redis.InstanceAuthString( + auth_string='auth_string_value', + )) + response = await client.get_instance_auth_string(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_redis.GetInstanceAuthStringRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cloud_redis.InstanceAuthString) + assert response.auth_string == 'auth_string_value' + + +@pytest.mark.asyncio +async def test_get_instance_auth_string_async_from_dict(): + await test_get_instance_auth_string_async(request_type=dict) + + +def test_get_instance_auth_string_field_headers(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_redis.GetInstanceAuthStringRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_instance_auth_string), + '__call__') as call: + call.return_value = cloud_redis.InstanceAuthString() + client.get_instance_auth_string(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_instance_auth_string_field_headers_async(): + client = CloudRedisAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_redis.GetInstanceAuthStringRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object( + type(client.transport.get_instance_auth_string), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloud_redis.InstanceAuthString()) + await client.get_instance_auth_string(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_get_instance_auth_string_flattened(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_instance_auth_string), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = cloud_redis.InstanceAuthString() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_instance_auth_string( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_get_instance_auth_string_flattened_error(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_instance_auth_string( + cloud_redis.GetInstanceAuthStringRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_get_instance_auth_string_flattened_async(): + client = CloudRedisAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_instance_auth_string), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = cloud_redis.InstanceAuthString() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloud_redis.InstanceAuthString()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_instance_auth_string( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_get_instance_auth_string_flattened_error_async(): + client = CloudRedisAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_instance_auth_string( + cloud_redis.GetInstanceAuthStringRequest(), + name='name_value', + ) + + @pytest.mark.parametrize("request_type", [ cloud_redis.CreateInstanceRequest, dict, @@ -2948,62 +3228,308 @@ async def test_delete_instance_flattened_error_async(): @pytest.mark.parametrize("request_type", [ - cloud_redis.ListInstancesRequest, - dict, + cloud_redis.RescheduleMaintenanceRequest, + dict, ]) -def test_list_instances_rest(request_type): +def test_reschedule_maintenance(request_type, transport: str = 'grpc'): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport=transport, ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = cloud_redis.ListInstancesResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - ) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = cloud_redis.ListInstancesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.reschedule_maintenance), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.reschedule_maintenance(request) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.list_instances(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_redis.RescheduleMaintenanceRequest() # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListInstancesPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable == ['unreachable_value'] - + assert isinstance(response, future.Future) -def test_list_instances_rest_required_fields(request_type=cloud_redis.ListInstancesRequest): - transport_class = transports.CloudRedisRestTransport - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) +def test_reschedule_maintenance_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) - # verify fields with default values are dropped + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.reschedule_maintenance), + '__call__') as call: + client.reschedule_maintenance() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_redis.RescheduleMaintenanceRequest() - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_instances._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) +@pytest.mark.asyncio +async def test_reschedule_maintenance_async(transport: str = 'grpc_asyncio', request_type=cloud_redis.RescheduleMaintenanceRequest): + client = CloudRedisAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - # verify required fields with default values are now present + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.reschedule_maintenance), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.reschedule_maintenance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_redis.RescheduleMaintenanceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_reschedule_maintenance_async_from_dict(): + await test_reschedule_maintenance_async(request_type=dict) + + +def test_reschedule_maintenance_field_headers(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_redis.RescheduleMaintenanceRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.reschedule_maintenance), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.reschedule_maintenance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_reschedule_maintenance_field_headers_async(): + client = CloudRedisAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_redis.RescheduleMaintenanceRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.reschedule_maintenance), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.reschedule_maintenance(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_reschedule_maintenance_flattened(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.reschedule_maintenance), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.reschedule_maintenance( + name='name_value', + reschedule_type=cloud_redis.RescheduleMaintenanceRequest.RescheduleType.IMMEDIATE, + schedule_time=timestamp_pb2.Timestamp(seconds=751), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + arg = args[0].reschedule_type + mock_val = cloud_redis.RescheduleMaintenanceRequest.RescheduleType.IMMEDIATE + assert arg == mock_val + assert TimestampRule().to_proto(args[0].schedule_time) == timestamp_pb2.Timestamp(seconds=751) + + +def test_reschedule_maintenance_flattened_error(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.reschedule_maintenance( + cloud_redis.RescheduleMaintenanceRequest(), + name='name_value', + reschedule_type=cloud_redis.RescheduleMaintenanceRequest.RescheduleType.IMMEDIATE, + schedule_time=timestamp_pb2.Timestamp(seconds=751), + ) + +@pytest.mark.asyncio +async def test_reschedule_maintenance_flattened_async(): + client = CloudRedisAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.reschedule_maintenance), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.reschedule_maintenance( + name='name_value', + reschedule_type=cloud_redis.RescheduleMaintenanceRequest.RescheduleType.IMMEDIATE, + schedule_time=timestamp_pb2.Timestamp(seconds=751), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + arg = args[0].reschedule_type + mock_val = cloud_redis.RescheduleMaintenanceRequest.RescheduleType.IMMEDIATE + assert arg == mock_val + assert TimestampRule().to_proto(args[0].schedule_time) == timestamp_pb2.Timestamp(seconds=751) + +@pytest.mark.asyncio +async def test_reschedule_maintenance_flattened_error_async(): + client = CloudRedisAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.reschedule_maintenance( + cloud_redis.RescheduleMaintenanceRequest(), + name='name_value', + reschedule_type=cloud_redis.RescheduleMaintenanceRequest.RescheduleType.IMMEDIATE, + schedule_time=timestamp_pb2.Timestamp(seconds=751), + ) + + +@pytest.mark.parametrize("request_type", [ + cloud_redis.ListInstancesRequest, + dict, +]) +def test_list_instances_rest(request_type): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = cloud_redis.ListInstancesResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloud_redis.ListInstancesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list_instances(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListInstancesPager) + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable == ['unreachable_value'] + + +def test_list_instances_rest_required_fields(request_type=cloud_redis.ListInstancesRequest): + transport_class = transports.CloudRedisRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_instances._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present jsonified_request["parent"] = 'parent_value' @@ -3262,6 +3788,7 @@ def test_get_instance_rest(request_type): alternative_location_id='alternative_location_id_value', redis_version='redis_version_value', reserved_ip_range='reserved_ip_range_value', + secondary_ip_range='secondary_ip_range_value', host='host_value', port=453, current_location_id='current_location_id_value', @@ -3272,6 +3799,16 @@ def test_get_instance_rest(request_type): authorized_network='authorized_network_value', persistence_iam_identity='persistence_iam_identity_value', connect_mode=cloud_redis.Instance.ConnectMode.DIRECT_PEERING, + auth_enabled=True, + transit_encryption_mode=cloud_redis.Instance.TransitEncryptionMode.SERVER_AUTHENTICATION, + replica_count=1384, + read_endpoint='read_endpoint_value', + read_endpoint_port=1920, + read_replicas_mode=cloud_redis.Instance.ReadReplicasMode.READ_REPLICAS_DISABLED, + customer_managed_key='customer_managed_key_value', + suspension_reasons=[cloud_redis.Instance.SuspensionReason.CUSTOMER_MANAGED_KEY_ISSUE], + maintenance_version='maintenance_version_value', + available_maintenance_versions=['available_maintenance_versions_value'], ) # Wrap the value into a proper Response obj @@ -3292,6 +3829,7 @@ def test_get_instance_rest(request_type): assert response.alternative_location_id == 'alternative_location_id_value' assert response.redis_version == 'redis_version_value' assert response.reserved_ip_range == 'reserved_ip_range_value' + assert response.secondary_ip_range == 'secondary_ip_range_value' assert response.host == 'host_value' assert response.port == 453 assert response.current_location_id == 'current_location_id_value' @@ -3302,6 +3840,16 @@ def test_get_instance_rest(request_type): assert response.authorized_network == 'authorized_network_value' assert response.persistence_iam_identity == 'persistence_iam_identity_value' assert response.connect_mode == cloud_redis.Instance.ConnectMode.DIRECT_PEERING + assert response.auth_enabled is True + assert response.transit_encryption_mode == cloud_redis.Instance.TransitEncryptionMode.SERVER_AUTHENTICATION + assert response.replica_count == 1384 + assert response.read_endpoint == 'read_endpoint_value' + assert response.read_endpoint_port == 1920 + assert response.read_replicas_mode == cloud_redis.Instance.ReadReplicasMode.READ_REPLICAS_DISABLED + assert response.customer_managed_key == 'customer_managed_key_value' + assert response.suspension_reasons == [cloud_redis.Instance.SuspensionReason.CUSTOMER_MANAGED_KEY_ISSUE] + assert response.maintenance_version == 'maintenance_version_value' + assert response.available_maintenance_versions == 
['available_maintenance_versions_value'] def test_get_instance_rest_required_fields(request_type=cloud_redis.GetInstanceRequest): @@ -3501,44 +4049,46 @@ def test_get_instance_rest_error(): @pytest.mark.parametrize("request_type", [ - cloud_redis.CreateInstanceRequest, + cloud_redis.GetInstanceAuthStringRequest, dict, ]) -def test_create_instance_rest(request_type): +def test_get_instance_auth_string_rest(request_type): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request_init["instance"] = {'name': 'name_value', 'display_name': 'display_name_value', 'labels': {}, 'location_id': 'location_id_value', 'alternative_location_id': 'alternative_location_id_value', 'redis_version': 'redis_version_value', 'reserved_ip_range': 'reserved_ip_range_value', 'host': 'host_value', 'port': 453, 'current_location_id': 'current_location_id_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'state': 1, 'status_message': 'status_message_value', 'redis_configs': {}, 'tier': 1, 'memory_size_gb': 1499, 'authorized_network': 'authorized_network_value', 'persistence_iam_identity': 'persistence_iam_identity_value', 'connect_mode': 1} + request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), 'request') as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') + return_value = cloud_redis.InstanceAuthString( + auth_string='auth_string_value', + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) + pb_return_value = cloud_redis.InstanceAuthString.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value - response = client.create_instance(request) + response = client.get_instance_auth_string(request) # Establish that the response is the type that we expect. 
- assert response.operation.name == "operations/spam" + assert isinstance(response, cloud_redis.InstanceAuthString) + assert response.auth_string == 'auth_string_value' -def test_create_instance_rest_required_fields(request_type=cloud_redis.CreateInstanceRequest): +def test_get_instance_auth_string_rest_required_fields(request_type=cloud_redis.GetInstanceAuthStringRequest): transport_class = transports.CloudRedisRestTransport request_init = {} - request_init["parent"] = "" - request_init["instance_id"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads(json_format.MessageToJson( @@ -3548,28 +4098,20 @@ def test_create_instance_rest_required_fields(request_type=cloud_redis.CreateIns )) # verify fields with default values are dropped - assert "instanceId" not in jsonified_request - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_instance._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_instance_auth_string._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - assert "instanceId" in jsonified_request - assert jsonified_request["instanceId"] == request_init["instance_id"] - jsonified_request["parent"] = 'parent_value' - jsonified_request["instanceId"] = 'instance_id_value' + jsonified_request["name"] = 'name_value' - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_instance._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("instance_id", )) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_instance_auth_string._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - assert "instanceId" in jsonified_request - assert jsonified_request["instanceId"] == 'instance_id_value' + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3578,7 +4120,7 @@ def test_create_instance_rest_required_fields(request_type=cloud_redis.CreateIns request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') + return_value = cloud_redis.InstanceAuthString() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, 'request') as req: # We need to mock transcode() because providing default values @@ -3590,40 +4132,278 @@ def test_create_instance_rest_required_fields(request_type=cloud_redis.CreateIns pb_request = request_type.pb(request) transcode_result = { 'uri': 'v1/sample_method', - 'method': "post", + 'method': "get", 'query_params': pb_request, } - transcode_result['body'] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) + + pb_return_value = cloud_redis.InstanceAuthString.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value - response = client.create_instance(request) + response = client.get_instance_auth_string(request) expected_params = [ - ( - "instanceId", - "", - ), ] actual_params = req.call_args.kwargs['params'] assert expected_params == actual_params -def test_create_instance_rest_unset_required_fields(): +def test_get_instance_auth_string_rest_unset_required_fields(): transport = transports.CloudRedisRestTransport(credentials=ga_credentials.AnonymousCredentials) - unset_fields = transport.create_instance._get_unset_required_fields({}) - assert set(unset_fields) == (set(("instanceId", )) & set(("parent", "instanceId", "instance", ))) + unset_fields = transport.get_instance_auth_string._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_instance_rest_interceptors(null_interceptor): +def test_get_instance_auth_string_rest_interceptors(null_interceptor): + transport = transports.CloudRedisRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.CloudRedisRestInterceptor(), + ) + client = CloudRedisClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.CloudRedisRestInterceptor, "post_get_instance_auth_string") as post, \ + mock.patch.object(transports.CloudRedisRestInterceptor, "pre_get_instance_auth_string") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloud_redis.GetInstanceAuthStringRequest.pb(cloud_redis.GetInstanceAuthStringRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = cloud_redis.InstanceAuthString.to_json(cloud_redis.InstanceAuthString()) + + request = cloud_redis.GetInstanceAuthStringRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cloud_redis.InstanceAuthString() + + client.get_instance_auth_string(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_instance_auth_string_rest_bad_request(transport: str = 'rest', request_type=cloud_redis.GetInstanceAuthStringRequest): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name':
'projects/sample1/locations/sample2/instances/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_instance_auth_string(request) + + +def test_get_instance_auth_string_rest_flattened(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = cloud_redis.InstanceAuthString() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloud_redis.InstanceAuthString.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get_instance_auth_string(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/locations/*/instances/*}/authString" % client.transport._host, args[1]) + + +def test_get_instance_auth_string_rest_flattened_error(transport: str = 'rest'): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_instance_auth_string( + cloud_redis.GetInstanceAuthStringRequest(), + name='name_value', + ) + + +def test_get_instance_auth_string_rest_error(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + cloud_redis.CreateInstanceRequest, + dict, +]) +def test_create_instance_rest(request_type): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init["instance"] = {'name': 'name_value', 'display_name': 'display_name_value', 'labels': {}, 'location_id': 'location_id_value', 'alternative_location_id': 'alternative_location_id_value', 'redis_version': 'redis_version_value', 'reserved_ip_range': 'reserved_ip_range_value', 'secondary_ip_range': 'secondary_ip_range_value', 'host': 'host_value', 'port': 453, 'current_location_id': 'current_location_id_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'state': 1, 'status_message': 'status_message_value', 'redis_configs': {}, 'tier': 1, 'memory_size_gb': 1499, 'authorized_network': 'authorized_network_value', 'persistence_iam_identity': 'persistence_iam_identity_value', 'connect_mode': 1, 'auth_enabled': True, 'server_ca_certs': [{'serial_number': 'serial_number_value', 'cert': 'cert_value', 'create_time': {}, 'expire_time': {}, 'sha1_fingerprint': 'sha1_fingerprint_value'}], 'transit_encryption_mode': 1, 'maintenance_policy': {'create_time': {}, 'update_time': {}, 'description': 'description_value', 'weekly_maintenance_window': [{'day': 1, 'start_time': {'hours': 561, 'minutes': 773, 'seconds': 751, 'nanos': 543}, 'duration': {'seconds': 751, 'nanos': 543}}]}, 'maintenance_schedule': {'start_time': {}, 'end_time': {}, 'can_reschedule': True, 'schedule_deadline_time': {}}, 'replica_count': 1384, 'nodes': [{'id': 'id_value', 'zone': 'zone_value'}], 'read_endpoint': 'read_endpoint_value', 'read_endpoint_port': 1920, 'read_replicas_mode': 1, 'customer_managed_key': 'customer_managed_key_value', 'persistence_config': {'persistence_mode': 1, 'rdb_snapshot_period': 3, 'rdb_next_snapshot_time': {}, 'rdb_snapshot_start_time': {}}, 'suspension_reasons': [1], 'maintenance_version': 'maintenance_version_value', 'available_maintenance_versions': ['available_maintenance_versions_value1', 'available_maintenance_versions_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.create_instance(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_create_instance_rest_required_fields(request_type=cloud_redis.CreateInstanceRequest): + transport_class = transports.CloudRedisRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["instance_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + assert "instanceId" not in jsonified_request + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "instanceId" in jsonified_request + assert jsonified_request["instanceId"] == request_init["instance_id"] + + jsonified_request["parent"] = 'parent_value' + jsonified_request["instanceId"] = 'instance_id_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_instance._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("instance_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + assert "instanceId" in jsonified_request + assert jsonified_request["instanceId"] == 'instance_id_value' + + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.create_instance(request) + + expected_params = [ + ( + "instanceId", + "", + ), + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_create_instance_rest_unset_required_fields(): + transport = transports.CloudRedisRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.create_instance._get_unset_required_fields({}) + assert set(unset_fields) == (set(("instanceId", )) & set(("parent", "instanceId", "instance", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_instance_rest_interceptors(null_interceptor): transport = transports.CloudRedisRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.CloudRedisRestInterceptor(), @@ -3671,7 +4451,7 @@ def test_create_instance_rest_bad_request(transport: str = 'rest', request_type= # send a request that will satisfy transcoding request_init = {'parent': 'projects/sample1/locations/sample2'} - request_init["instance"] = {'name': 'name_value', 'display_name': 'display_name_value', 'labels': {}, 'location_id': 'location_id_value', 'alternative_location_id': 'alternative_location_id_value', 'redis_version': 'redis_version_value', 'reserved_ip_range': 'reserved_ip_range_value', 'host': 'host_value', 'port': 453, 'current_location_id': 'current_location_id_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'state': 1, 'status_message': 'status_message_value', 'redis_configs': {}, 'tier': 1, 'memory_size_gb': 1499, 'authorized_network': 'authorized_network_value', 'persistence_iam_identity': 'persistence_iam_identity_value', 'connect_mode': 1} + request_init["instance"] = {'name': 'name_value', 'display_name': 'display_name_value', 'labels': {}, 'location_id': 'location_id_value', 'alternative_location_id': 'alternative_location_id_value', 'redis_version': 'redis_version_value', 'reserved_ip_range': 'reserved_ip_range_value', 'secondary_ip_range': 'secondary_ip_range_value', 'host': 'host_value', 'port': 453, 'current_location_id': 'current_location_id_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'state': 1, 'status_message': 'status_message_value', 'redis_configs': {}, 'tier': 1, 'memory_size_gb': 1499, 'authorized_network': 'authorized_network_value', 'persistence_iam_identity': 'persistence_iam_identity_value', 'connect_mode': 1, 'auth_enabled': True, 'server_ca_certs': [{'serial_number': 'serial_number_value', 'cert': 'cert_value', 'create_time': {}, 'expire_time': {}, 'sha1_fingerprint': 'sha1_fingerprint_value'}], 'transit_encryption_mode': 1, 'maintenance_policy': {'create_time': {}, 'update_time': {}, 'description': 'description_value', 'weekly_maintenance_window': [{'day': 1, 'start_time': {'hours': 561, 'minutes': 773, 'seconds': 751, 'nanos': 543}, 'duration': {'seconds': 751, 'nanos': 543}}]}, 'maintenance_schedule': {'start_time': {}, 'end_time': {}, 'can_reschedule': True, 'schedule_deadline_time': {}}, 'replica_count': 1384, 'nodes': [{'id': 'id_value', 'zone': 
'zone_value'}], 'read_endpoint': 'read_endpoint_value', 'read_endpoint_port': 1920, 'read_replicas_mode': 1, 'customer_managed_key': 'customer_managed_key_value', 'persistence_config': {'persistence_mode': 1, 'rdb_snapshot_period': 3, 'rdb_next_snapshot_time': {}, 'rdb_snapshot_start_time': {}}, 'suspension_reasons': [1], 'maintenance_version': 'maintenance_version_value', 'available_maintenance_versions': ['available_maintenance_versions_value1', 'available_maintenance_versions_value2']} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -3758,7 +4538,7 @@ def test_update_instance_rest(request_type): # send a request that will satisfy transcoding request_init = {'instance': {'name': 'projects/sample1/locations/sample2/instances/sample3'}} - request_init["instance"] = {'name': 'projects/sample1/locations/sample2/instances/sample3', 'display_name': 'display_name_value', 'labels': {}, 'location_id': 'location_id_value', 'alternative_location_id': 'alternative_location_id_value', 'redis_version': 'redis_version_value', 'reserved_ip_range': 'reserved_ip_range_value', 'host': 'host_value', 'port': 453, 'current_location_id': 'current_location_id_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'state': 1, 'status_message': 'status_message_value', 'redis_configs': {}, 'tier': 1, 'memory_size_gb': 1499, 'authorized_network': 'authorized_network_value', 'persistence_iam_identity': 'persistence_iam_identity_value', 'connect_mode': 1} + request_init["instance"] = {'name': 'projects/sample1/locations/sample2/instances/sample3', 'display_name': 'display_name_value', 'labels': {}, 'location_id': 'location_id_value', 'alternative_location_id': 'alternative_location_id_value', 'redis_version': 'redis_version_value', 'reserved_ip_range': 'reserved_ip_range_value', 'secondary_ip_range': 'secondary_ip_range_value', 'host': 'host_value', 'port': 453, 'current_location_id': 'current_location_id_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'state': 1, 'status_message': 'status_message_value', 'redis_configs': {}, 'tier': 1, 'memory_size_gb': 1499, 'authorized_network': 'authorized_network_value', 'persistence_iam_identity': 'persistence_iam_identity_value', 'connect_mode': 1, 'auth_enabled': True, 'server_ca_certs': [{'serial_number': 'serial_number_value', 'cert': 'cert_value', 'create_time': {}, 'expire_time': {}, 'sha1_fingerprint': 'sha1_fingerprint_value'}], 'transit_encryption_mode': 1, 'maintenance_policy': {'create_time': {}, 'update_time': {}, 'description': 'description_value', 'weekly_maintenance_window': [{'day': 1, 'start_time': {'hours': 561, 'minutes': 773, 'seconds': 751, 'nanos': 543}, 'duration': {'seconds': 751, 'nanos': 543}}]}, 'maintenance_schedule': {'start_time': {}, 'end_time': {}, 'can_reschedule': True, 'schedule_deadline_time': {}}, 'replica_count': 1384, 'nodes': [{'id': 'id_value', 'zone': 'zone_value'}], 'read_endpoint': 'read_endpoint_value', 'read_endpoint_port': 1920, 'read_replicas_mode': 1, 'customer_managed_key': 'customer_managed_key_value', 'persistence_config': {'persistence_mode': 1, 'rdb_snapshot_period': 3, 'rdb_next_snapshot_time': {}, 'rdb_snapshot_start_time': {}}, 'suspension_reasons': [1], 'maintenance_version': 'maintenance_version_value', 'available_maintenance_versions': ['available_maintenance_versions_value1', 'available_maintenance_versions_value2']} request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -3901,7 +4681,7 @@ def test_update_instance_rest_bad_request(transport: str = 'rest', request_type= # send a request that will satisfy transcoding request_init = {'instance': {'name': 'projects/sample1/locations/sample2/instances/sample3'}} - request_init["instance"] = {'name': 'projects/sample1/locations/sample2/instances/sample3', 'display_name': 'display_name_value', 'labels': {}, 'location_id': 'location_id_value', 'alternative_location_id': 'alternative_location_id_value', 'redis_version': 'redis_version_value', 'reserved_ip_range': 'reserved_ip_range_value', 'host': 'host_value', 'port': 453, 'current_location_id': 'current_location_id_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'state': 1, 'status_message': 'status_message_value', 'redis_configs': {}, 'tier': 1, 'memory_size_gb': 1499, 'authorized_network': 'authorized_network_value', 'persistence_iam_identity': 'persistence_iam_identity_value', 'connect_mode': 1} + request_init["instance"] = {'name': 'projects/sample1/locations/sample2/instances/sample3', 'display_name': 'display_name_value', 'labels': {}, 'location_id': 'location_id_value', 'alternative_location_id': 'alternative_location_id_value', 'redis_version': 'redis_version_value', 'reserved_ip_range': 'reserved_ip_range_value', 'secondary_ip_range': 'secondary_ip_range_value', 'host': 'host_value', 'port': 453, 'current_location_id': 'current_location_id_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'state': 1, 'status_message': 'status_message_value', 'redis_configs': {}, 'tier': 1, 'memory_size_gb': 1499, 'authorized_network': 'authorized_network_value', 'persistence_iam_identity': 'persistence_iam_identity_value', 'connect_mode': 1, 'auth_enabled': True, 'server_ca_certs': [{'serial_number': 'serial_number_value', 'cert': 'cert_value', 'create_time': {}, 'expire_time': {}, 'sha1_fingerprint': 'sha1_fingerprint_value'}], 'transit_encryption_mode': 1, 'maintenance_policy': {'create_time': {}, 'update_time': {}, 'description': 'description_value', 'weekly_maintenance_window': [{'day': 1, 'start_time': {'hours': 561, 'minutes': 773, 'seconds': 751, 'nanos': 543}, 'duration': {'seconds': 751, 'nanos': 543}}]}, 'maintenance_schedule': {'start_time': {}, 'end_time': {}, 'can_reschedule': True, 'schedule_deadline_time': {}}, 'replica_count': 1384, 'nodes': [{'id': 'id_value', 'zone': 'zone_value'}], 'read_endpoint': 'read_endpoint_value', 'read_endpoint_port': 1920, 'read_replicas_mode': 1, 'customer_managed_key': 'customer_managed_key_value', 'persistence_config': {'persistence_mode': 1, 'rdb_snapshot_period': 3, 'rdb_next_snapshot_time': {}, 'rdb_snapshot_start_time': {}}, 'suspension_reasons': [1], 'maintenance_version': 'maintenance_version_value', 'available_maintenance_versions': ['available_maintenance_versions_value1', 'available_maintenance_versions_value2']} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -5120,36 +5900,267 @@ def test_delete_instance_rest_error(): ) -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. 
- transport = transports.CloudRedisGrpcTransport( +@pytest.mark.parametrize("request_type", [ + cloud_redis.RescheduleMaintenanceRequest, + dict, +]) +def test_reschedule_maintenance_rest(request_type): + client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - with pytest.raises(ValueError): - client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - # It is an error to provide a credentials file and a transport instance. - transport = transports.CloudRedisGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = CloudRedisClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request = request_type(**request_init) - # It is an error to provide an api_key and a transport instance. - transport = transports.CloudRedisGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = CloudRedisClient( - client_options=options, + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.reschedule_maintenance(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_reschedule_maintenance_rest_required_fields(request_type=cloud_redis.RescheduleMaintenanceRequest): + transport_class = transports.CloudRedisRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).reschedule_maintenance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).reschedule_maintenance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. 
+    with mock.patch.object(Session, 'request') as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, 'transcode') as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "post",
+                'query_params': pb_request,
+            }
+            transcode_result['body'] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+            json_return_value = json_format.MessageToJson(return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.reschedule_maintenance(request)
+
+            expected_params = [
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_reschedule_maintenance_rest_unset_required_fields():
+    transport = transports.CloudRedisRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.reschedule_maintenance._get_unset_required_fields({})
+    assert set(unset_fields) == (set(()) & set(("name", "rescheduleType", )))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_reschedule_maintenance_rest_interceptors(null_interceptor):
+    transport = transports.CloudRedisRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.CloudRedisRestInterceptor(),
+        )
+    client = CloudRedisClient(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(operation.Operation, "_set_result_from_operation"), \
+        mock.patch.object(transports.CloudRedisRestInterceptor, "post_reschedule_maintenance") as post, \
+        mock.patch.object(transports.CloudRedisRestInterceptor, "pre_reschedule_maintenance") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = cloud_redis.RescheduleMaintenanceRequest.pb(cloud_redis.RescheduleMaintenanceRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = json_format.MessageToJson(operations_pb2.Operation())
+
+        request = cloud_redis.RescheduleMaintenanceRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = operations_pb2.Operation()
+
+        client.reschedule_maintenance(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_reschedule_maintenance_rest_bad_request(transport: str = 'rest', request_type=cloud_redis.RescheduleMaintenanceRequest):
+    client = CloudRedisClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.reschedule_maintenance(request) + + +def test_reschedule_maintenance_rest_flattened(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + reschedule_type=cloud_redis.RescheduleMaintenanceRequest.RescheduleType.IMMEDIATE, + schedule_time=timestamp_pb2.Timestamp(seconds=751), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.reschedule_maintenance(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/locations/*/instances/*}:rescheduleMaintenance" % client.transport._host, args[1]) + + +def test_reschedule_maintenance_rest_flattened_error(transport: str = 'rest'): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.reschedule_maintenance( + cloud_redis.RescheduleMaintenanceRequest(), + name='name_value', + reschedule_type=cloud_redis.RescheduleMaintenanceRequest.RescheduleType.IMMEDIATE, + schedule_time=timestamp_pb2.Timestamp(seconds=751), + ) + + +def test_reschedule_maintenance_rest_error(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.CloudRedisGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.CloudRedisGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = CloudRedisClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. 
+ transport = transports.CloudRedisGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = CloudRedisClient( + client_options=options, transport=transport, ) @@ -5249,6 +6260,7 @@ def test_cloud_redis_base_transport(): methods = ( 'list_instances', 'get_instance', + 'get_instance_auth_string', 'create_instance', 'update_instance', 'upgrade_instance', @@ -5256,6 +6268,13 @@ def test_cloud_redis_base_transport(): 'export_instance', 'failover_instance', 'delete_instance', + 'reschedule_maintenance', + 'get_location', + 'list_locations', + 'get_operation', + 'cancel_operation', + 'delete_operation', + 'list_operations', ) for method in methods: with pytest.raises(NotImplementedError): @@ -5522,6 +6541,9 @@ def test_cloud_redis_client_transport_session_collision(transport_name): session1 = client1.transport.get_instance._session session2 = client2.transport.get_instance._session assert session1 != session2 + session1 = client1.transport.get_instance_auth_string._session + session2 = client2.transport.get_instance_auth_string._session + assert session1 != session2 session1 = client1.transport.create_instance._session session2 = client2.transport.create_instance._session assert session1 != session2 @@ -5543,6 +6565,9 @@ def test_cloud_redis_client_transport_session_collision(transport_name): session1 = client1.transport.delete_instance._session session2 = client2.transport.delete_instance._session assert session1 != session2 + session1 = client1.transport.reschedule_maintenance._session + session2 = client2.transport.reschedule_maintenance._session + assert session1 != session2 def test_cloud_redis_grpc_transport_channel(): channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) @@ -5826,6 +6851,1062 @@ async def test_transport_close_async(): close.assert_called_once() +def test_get_location_rest_bad_request(transport: str = 'rest', request_type=locations_pb2.GetLocationRequest): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2'}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_location(request) + +@pytest.mark.parametrize("request_type", [ + locations_pb2.GetLocationRequest, + dict, +]) +def test_get_location_rest(request_type): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {'name': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = locations_pb2.Location() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get_location(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + +def test_list_locations_rest_bad_request(transport: str = 'rest', request_type=locations_pb2.ListLocationsRequest): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1'}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_locations(request) + +@pytest.mark.parametrize("request_type", [ + locations_pb2.ListLocationsRequest, + dict, +]) +def test_list_locations_rest(request_type): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {'name': 'projects/sample1'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.ListLocationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list_locations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + +def test_cancel_operation_rest_bad_request(transport: str = 'rest', request_type=operations_pb2.CancelOperationRequest): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_operation(request) + +@pytest.mark.parametrize("request_type", [ + operations_pb2.CancelOperationRequest, + dict, +]) +def test_cancel_operation_rest(request_type): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = '{}' + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + +def test_delete_operation_rest_bad_request(transport: str = 'rest', request_type=operations_pb2.DeleteOperationRequest): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_operation(request) + +@pytest.mark.parametrize("request_type", [ + operations_pb2.DeleteOperationRequest, + dict, +]) +def test_delete_operation_rest(request_type): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = '{}' + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + +def test_get_operation_rest_bad_request(transport: str = 'rest', request_type=operations_pb2.GetOperationRequest): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) + +@pytest.mark.parametrize("request_type", [ + operations_pb2.GetOperationRequest, + dict, +]) +def test_get_operation_rest(request_type): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + +def test_list_operations_rest_bad_request(transport: str = 'rest', request_type=operations_pb2.ListOperationsRequest): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2'}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_operations(request) + +@pytest.mark.parametrize("request_type", [ + operations_pb2.ListOperationsRequest, + dict, +]) +def test_list_operations_rest(request_type): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {'name': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_delete_operation(transport: str = "grpc"): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert response is None +@pytest.mark.asyncio +async def test_delete_operation_async(transport: str = "grpc"): + client = CloudRedisAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + +def test_delete_operation_field_headers(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = None + + client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_delete_operation_field_headers_async(): + client = CloudRedisAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_delete_operation_from_dict(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_delete_operation_from_dict_async(): + client = CloudRedisAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_cancel_operation(transport: str = "grpc"): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc"): + client = CloudRedisAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + +def test_cancel_operation_field_headers(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = CloudRedisAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_cancel_operation_from_dict(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = CloudRedisAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_operation(transport: str = "grpc"): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc"): + client = CloudRedisAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, operations_pb2.Operation) + +def test_get_operation_field_headers(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = CloudRedisAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_get_operation_from_dict(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = CloudRedisAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc"): + client = CloudRedisAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + +def test_list_operations_field_headers(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = CloudRedisAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_list_operations_from_dict(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = CloudRedisAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_locations(transport: str = "grpc"): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.ListLocationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.ListLocationsResponse() + response = client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) +@pytest.mark.asyncio +async def test_list_locations_async(transport: str = "grpc"): + client = CloudRedisAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.ListLocationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + response = await client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + +def test_list_locations_field_headers(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.ListLocationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + call.return_value = locations_pb2.ListLocationsResponse() + + client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_list_locations_field_headers_async(): + client = CloudRedisAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.ListLocationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + await client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_list_locations_from_dict(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.ListLocationsResponse() + + response = client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_list_locations_from_dict_async(): + client = CloudRedisAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + response = await client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_location(transport: str = "grpc"): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.GetLocationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.Location() + response = client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. 
+    assert isinstance(response, locations_pb2.Location)
+@pytest.mark.asyncio
+async def test_get_location_async(transport: str = "grpc_asyncio"):
+    client = CloudRedisAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = locations_pb2.GetLocationRequest()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            locations_pb2.Location()
+        )
+        response = await client.get_location(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, locations_pb2.Location)
+
+def test_get_location_field_headers():
+    client = CloudRedisClient(
+        credentials=ga_credentials.AnonymousCredentials())
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = locations_pb2.GetLocationRequest()
+    request.name = "locations/abc"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        call.return_value = locations_pb2.Location()
+
+        client.get_location(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "name=locations/abc",) in kw["metadata"]
+@pytest.mark.asyncio
+async def test_get_location_field_headers_async():
+    client = CloudRedisAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = locations_pb2.GetLocationRequest()
+    request.name = "locations/abc"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            locations_pb2.Location()
+        )
+        await client.get_location(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "name=locations/abc",) in kw["metadata"]
+
+def test_get_location_from_dict():
+    client = CloudRedisClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = locations_pb2.Location()
+
+        response = client.get_location(
+            request={
+                "name": "locations/abc",
+            }
+        )
+        call.assert_called()
+@pytest.mark.asyncio
+async def test_get_location_from_dict_async():
+    client = CloudRedisAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            locations_pb2.Location()
+        )
+        response = await client.get_location(
+            request={
+                "name": "locations/abc",
+            }
+        )
+        call.assert_called()
+
+
 def test_transport_close():
     transports = {
         "rest": "_session",
diff --git a/tests/integration/iamcredentials_v1.yaml b/tests/integration/iamcredentials_v1.yaml
new file mode 100644
index 0000000000..6f3f05ab34
--- /dev/null
+++ b/tests/integration/iamcredentials_v1.yaml
@@ -0,0 +1,17 @@
+type: google.api.Service
+config_version: 3
+name: iamcredentials.googleapis.com
+title: IAM Service Account Credentials API
+
+apis:
+- name: google.iam.credentials.v1.IAMCredentials
+
+documentation:
+  summary: 'Creates short-lived, limited-privilege credentials for IAM service accounts.'
+
+authentication:
+  rules:
+  - selector: 'google.iam.credentials.v1.IAMCredentials.*'
+    oauth:
+      canonical_scopes: |-
+        https://www.googleapis.com/auth/cloud-platform
\ No newline at end of file
diff --git a/tests/integration/logging_v2.yaml b/tests/integration/logging_v2.yaml
new file mode 100644
index 0000000000..7f10ee0bd1
--- /dev/null
+++ b/tests/integration/logging_v2.yaml
@@ -0,0 +1,221 @@
+type: google.api.Service
+config_version: 3
+name: logging.googleapis.com
+title: Cloud Logging API
+
+apis:
+- name: google.logging.v2.ConfigServiceV2
+- name: google.logging.v2.LoggingServiceV2
+- name: google.logging.v2.MetricsServiceV2
+- name: google.longrunning.Operations
+
+types:
+- name: google.logging.v2.BucketMetadata
+- name: google.logging.v2.CopyLogEntriesMetadata
+- name: google.logging.v2.CopyLogEntriesResponse
+- name: google.logging.v2.LinkMetadata
+- name: google.logging.v2.LocationMetadata
+
+documentation:
+  summary: Writes log entries and manages your Cloud Logging configuration.
+  overview: |-
+    # Introduction
+    The Cloud Logging service.
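
The logging_v2.yaml config continues below with backend deadlines, HTTP bindings for the long-running-operations mixin, and per-method OAuth scopes. When auditing a config of this size it can help to load it and index the rules by selector; the sketch below is a convenience for that kind of inspection, assuming PyYAML is installed and the file is saved locally. It is not part of the generator or the tests.

# Sketch: index a google.api.Service config's backend rules by selector.
# Assumes PyYAML (`pip install pyyaml`) and a local copy of logging_v2.yaml.
import yaml

with open("logging_v2.yaml") as config_file:
    service = yaml.safe_load(config_file)

deadlines = {
    rule["selector"]: rule.get("deadline")
    for rule in service.get("backend", {}).get("rules", [])
}

# Wildcard selectors such as 'google.logging.v2.LoggingServiceV2.*' provide
# defaults; more specific selectors (e.g. TailLogEntries) override them.
print(deadlines["google.logging.v2.LoggingServiceV2.TailLogEntries"])  # 3600.0
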
+backend: + rules: + - selector: 'google.logging.v2.ConfigServiceV2.*' + deadline: 60.0 + - selector: google.logging.v2.ConfigServiceV2.CreateBucket + deadline: 600.0 + - selector: google.logging.v2.ConfigServiceV2.UpdateBucket + deadline: 600.0 + - selector: 'google.logging.v2.LoggingServiceV2.*' + deadline: 60.0 + - selector: google.logging.v2.LoggingServiceV2.ListLogEntries + deadline: 10.0 + - selector: google.logging.v2.LoggingServiceV2.TailLogEntries + deadline: 3600.0 + - selector: 'google.logging.v2.MetricsServiceV2.*' + deadline: 60.0 + - selector: 'google.longrunning.Operations.*' + deadline: 60.0 + +http: + rules: + - selector: google.longrunning.Operations.CancelOperation + post: '/v2/{name=*/*/locations/*/operations/*}:cancel' + body: '*' + additional_bindings: + - post: '/v2/{name=projects/*/locations/*/operations/*}:cancel' + body: '*' + - post: '/v2/{name=organizations/*/locations/*/operations/*}:cancel' + body: '*' + - post: '/v2/{name=folders/*/locations/*/operations/*}:cancel' + body: '*' + - post: '/v2/{name=billingAccounts/*/locations/*/operations/*}:cancel' + body: '*' + - selector: google.longrunning.Operations.GetOperation + get: '/v2/{name=*/*/locations/*/operations/*}' + additional_bindings: + - get: '/v2/{name=projects/*/locations/*/operations/*}' + - get: '/v2/{name=organizations/*/locations/*/operations/*}' + - get: '/v2/{name=folders/*/locations/*/operations/*}' + - get: '/v2/{name=billingAccounts/*/locations/*/operations/*}' + - selector: google.longrunning.Operations.ListOperations + get: '/v2/{name=*/*/locations/*}/operations' + additional_bindings: + - get: '/v2/{name=projects/*/locations/*}/operations' + - get: '/v2/{name=organizations/*/locations/*}/operations' + - get: '/v2/{name=folders/*/locations/*}/operations' + - get: '/v2/{name=billingAccounts/*/locations/*}/operations' + +authentication: + rules: + - selector: 'google.logging.v2.ConfigServiceV2.*' + oauth: + canonical_scopes: |- + https://www.googleapis.com/auth/cloud-platform, + https://www.googleapis.com/auth/logging.admin + - selector: google.logging.v2.ConfigServiceV2.GetBucket + oauth: + canonical_scopes: |- + https://www.googleapis.com/auth/cloud-platform, + https://www.googleapis.com/auth/cloud-platform.read-only, + https://www.googleapis.com/auth/logging.admin, + https://www.googleapis.com/auth/logging.read + - selector: google.logging.v2.ConfigServiceV2.GetCmekSettings + oauth: + canonical_scopes: |- + https://www.googleapis.com/auth/cloud-platform, + https://www.googleapis.com/auth/cloud-platform.read-only, + https://www.googleapis.com/auth/logging.admin, + https://www.googleapis.com/auth/logging.read + - selector: google.logging.v2.ConfigServiceV2.GetExclusion + oauth: + canonical_scopes: |- + https://www.googleapis.com/auth/cloud-platform, + https://www.googleapis.com/auth/cloud-platform.read-only, + https://www.googleapis.com/auth/logging.admin, + https://www.googleapis.com/auth/logging.read + - selector: google.logging.v2.ConfigServiceV2.GetLink + oauth: + canonical_scopes: |- + https://www.googleapis.com/auth/cloud-platform, + https://www.googleapis.com/auth/cloud-platform.read-only, + https://www.googleapis.com/auth/logging.admin, + https://www.googleapis.com/auth/logging.read + - selector: google.logging.v2.ConfigServiceV2.GetSettings + oauth: + canonical_scopes: |- + https://www.googleapis.com/auth/cloud-platform, + https://www.googleapis.com/auth/cloud-platform.read-only, + https://www.googleapis.com/auth/logging.admin, + https://www.googleapis.com/auth/logging.read + - 
selector: google.logging.v2.ConfigServiceV2.GetSink + oauth: + canonical_scopes: |- + https://www.googleapis.com/auth/cloud-platform, + https://www.googleapis.com/auth/cloud-platform.read-only, + https://www.googleapis.com/auth/logging.admin, + https://www.googleapis.com/auth/logging.read + - selector: google.logging.v2.ConfigServiceV2.GetView + oauth: + canonical_scopes: |- + https://www.googleapis.com/auth/cloud-platform, + https://www.googleapis.com/auth/cloud-platform.read-only, + https://www.googleapis.com/auth/logging.admin, + https://www.googleapis.com/auth/logging.read + - selector: google.logging.v2.ConfigServiceV2.ListBuckets + oauth: + canonical_scopes: |- + https://www.googleapis.com/auth/cloud-platform, + https://www.googleapis.com/auth/cloud-platform.read-only, + https://www.googleapis.com/auth/logging.admin, + https://www.googleapis.com/auth/logging.read + - selector: google.logging.v2.ConfigServiceV2.ListExclusions + oauth: + canonical_scopes: |- + https://www.googleapis.com/auth/cloud-platform, + https://www.googleapis.com/auth/cloud-platform.read-only, + https://www.googleapis.com/auth/logging.admin, + https://www.googleapis.com/auth/logging.read + - selector: google.logging.v2.ConfigServiceV2.ListLinks + oauth: + canonical_scopes: |- + https://www.googleapis.com/auth/cloud-platform, + https://www.googleapis.com/auth/cloud-platform.read-only, + https://www.googleapis.com/auth/logging.admin, + https://www.googleapis.com/auth/logging.read + - selector: google.logging.v2.ConfigServiceV2.ListSinks + oauth: + canonical_scopes: |- + https://www.googleapis.com/auth/cloud-platform, + https://www.googleapis.com/auth/cloud-platform.read-only, + https://www.googleapis.com/auth/logging.admin, + https://www.googleapis.com/auth/logging.read + - selector: google.logging.v2.ConfigServiceV2.ListViews + oauth: + canonical_scopes: |- + https://www.googleapis.com/auth/cloud-platform, + https://www.googleapis.com/auth/cloud-platform.read-only, + https://www.googleapis.com/auth/logging.admin, + https://www.googleapis.com/auth/logging.read + - selector: 'google.logging.v2.LoggingServiceV2.*' + oauth: + canonical_scopes: |- + https://www.googleapis.com/auth/cloud-platform, + https://www.googleapis.com/auth/cloud-platform.read-only, + https://www.googleapis.com/auth/logging.admin, + https://www.googleapis.com/auth/logging.read + - selector: google.logging.v2.LoggingServiceV2.DeleteLog + oauth: + canonical_scopes: |- + https://www.googleapis.com/auth/cloud-platform, + https://www.googleapis.com/auth/logging.admin + - selector: google.logging.v2.LoggingServiceV2.WriteLogEntries + oauth: + canonical_scopes: |- + https://www.googleapis.com/auth/cloud-platform, + https://www.googleapis.com/auth/logging.admin, + https://www.googleapis.com/auth/logging.write + - selector: 'google.logging.v2.MetricsServiceV2.*' + oauth: + canonical_scopes: |- + https://www.googleapis.com/auth/cloud-platform, + https://www.googleapis.com/auth/logging.admin, + https://www.googleapis.com/auth/logging.write + - selector: google.logging.v2.MetricsServiceV2.GetLogMetric + oauth: + canonical_scopes: |- + https://www.googleapis.com/auth/cloud-platform, + https://www.googleapis.com/auth/cloud-platform.read-only, + https://www.googleapis.com/auth/logging.admin, + https://www.googleapis.com/auth/logging.read + - selector: google.logging.v2.MetricsServiceV2.ListLogMetrics + oauth: + canonical_scopes: |- + https://www.googleapis.com/auth/cloud-platform, + https://www.googleapis.com/auth/cloud-platform.read-only, + 
https://www.googleapis.com/auth/logging.admin, + https://www.googleapis.com/auth/logging.read + - selector: google.longrunning.Operations.CancelOperation + oauth: + canonical_scopes: |- + https://www.googleapis.com/auth/cloud-platform, + https://www.googleapis.com/auth/logging.admin + - selector: google.longrunning.Operations.GetOperation + oauth: + canonical_scopes: |- + https://www.googleapis.com/auth/cloud-platform, + https://www.googleapis.com/auth/cloud-platform.read-only, + https://www.googleapis.com/auth/logging.admin, + https://www.googleapis.com/auth/logging.read + - selector: google.longrunning.Operations.ListOperations + oauth: + canonical_scopes: |- + https://www.googleapis.com/auth/cloud-platform, + https://www.googleapis.com/auth/cloud-platform.read-only, + https://www.googleapis.com/auth/logging.admin, + https://www.googleapis.com/auth/logging.read +publishing: + documentation_uri: https://cloud.google.com/logging/docs/ \ No newline at end of file diff --git a/tests/integration/redis_v1.yaml b/tests/integration/redis_v1.yaml new file mode 100644 index 0000000000..499c13d4e4 --- /dev/null +++ b/tests/integration/redis_v1.yaml @@ -0,0 +1,70 @@ +type: google.api.Service +config_version: 3 +name: redis.googleapis.com +title: Google Cloud Memorystore for Redis API + +apis: +- name: google.cloud.location.Locations +- name: google.cloud.redis.v1.CloudRedis +- name: google.longrunning.Operations + +types: +- name: google.cloud.redis.v1.LocationMetadata +- name: google.cloud.redis.v1.OperationMetadata +- name: google.cloud.redis.v1.ZoneMetadata + +documentation: + summary: Creates and manages Redis instances on the Google Cloud Platform. + rules: + - selector: google.cloud.location.Locations.GetLocation + description: Gets information about a location. + + - selector: google.cloud.location.Locations.ListLocations + description: Lists information about the supported locations for this service. 
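A note on the sections that follow: the `http.rules` below bind RPCs such as GetLocation to REST paths, where a template variable like `{name=projects/*/locations/*}` matches a resource name one segment per `*`. A rough, hypothetical illustration of that matching rule (real clients use a full google.api path-template parser; `segments_match` is invented for this sketch):

```python
# Sketch: test a resource name against an http rule variable pattern such as
# 'projects/*/locations/*'. Hypothetical helper for illustration only.
import re

def segments_match(pattern: str, name: str) -> bool:
    # Each '*' matches exactly one path segment (anything but a slash).
    regex = "^" + re.escape(pattern).replace(r"\*", "[^/]+") + "$"
    return re.match(regex, name) is not None

assert segments_match("projects/*/locations/*", "projects/demo/locations/us-central1")
assert not segments_match("projects/*/locations/*", "projects/demo")
```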
+ +backend: + rules: + - selector: google.cloud.location.Locations.GetLocation + deadline: 60.0 + - selector: google.cloud.location.Locations.ListLocations + deadline: 60.0 + - selector: 'google.cloud.redis.v1.CloudRedis.*' + deadline: 60.0 + - selector: google.cloud.redis.v1.CloudRedis.ListInstances + deadline: 20.0 + - selector: 'google.longrunning.Operations.*' + deadline: 60.0 + +http: + rules: + - selector: google.cloud.location.Locations.GetLocation + get: '/v1/{name=projects/*/locations/*}' + - selector: google.cloud.location.Locations.ListLocations + get: '/v1/{name=projects/*}/locations' + - selector: google.longrunning.Operations.CancelOperation + post: '/v1/{name=projects/*/locations/*/operations/*}:cancel' + - selector: google.longrunning.Operations.DeleteOperation + delete: '/v1/{name=projects/*/locations/*/operations/*}' + - selector: google.longrunning.Operations.GetOperation + get: '/v1/{name=projects/*/locations/*/operations/*}' + - selector: google.longrunning.Operations.ListOperations + get: '/v1/{name=projects/*/locations/*}/operations' + +authentication: + rules: + - selector: google.cloud.location.Locations.GetLocation + oauth: + canonical_scopes: |- + https://www.googleapis.com/auth/cloud-platform + - selector: google.cloud.location.Locations.ListLocations + oauth: + canonical_scopes: |- + https://www.googleapis.com/auth/cloud-platform + - selector: 'google.cloud.redis.v1.CloudRedis.*' + oauth: + canonical_scopes: |- + https://www.googleapis.com/auth/cloud-platform + - selector: 'google.longrunning.Operations.*' + oauth: + canonical_scopes: |- + https://www.googleapis.com/auth/cloud-platform \ No newline at end of file diff --git a/tests/system/test_retry.py b/tests/system/test_retry.py index 0d2f941090..d649cbabd3 100644 --- a/tests/system/test_retry.py +++ b/tests/system/test_retry.py @@ -20,7 +20,8 @@ def test_retry_bubble(echo): - with pytest.raises(exceptions.GatewayTimeout): + # Note: DeadlineExceeded is from gRPC, GatewayTimeout from http + with pytest.raises((exceptions.DeadlineExceeded, exceptions.GatewayTimeout)): echo.echo({ 'error': { 'code': code_pb2.Code.Value('DEADLINE_EXCEEDED'), diff --git a/tests/system/test_unary.py b/tests/system/test_unary.py index bc72f352af..076771ed30 100644 --- a/tests/system/test_unary.py +++ b/tests/system/test_unary.py @@ -37,8 +37,9 @@ def test_unary_with_dict(echo): def test_unary_error(echo): message = 'Bad things! Bad things!' - # Note: InvalidArgument is from gRPC, InternalServerError from http - with pytest.raises(exceptions.BadRequest) as exc: + # Note: InvalidArgument is from gRPC, BadRequest from http (no MTLS), InternalServerError from http (MTLS) + # TODO: Reduce number of different exception types here. 
+    with pytest.raises((exceptions.InvalidArgument, exceptions.BadRequest, exceptions.InternalServerError)) as exc:
         echo.echo({
             'error': {
                 'code': code_pb2.Code.Value('INVALID_ARGUMENT'),
diff --git a/tests/unit/generator/test_options.py b/tests/unit/generator/test_options.py
index 6716e21fe6..3a12bf474e 100644
--- a/tests/unit/generator/test_options.py
+++ b/tests/unit/generator/test_options.py
@@ -133,7 +133,7 @@ def test_options_service_config(fs):
                     "UNKNOWN"
                 ]
             },
-            "timeout":"5s"
+            "timeout": "5s"
         }
     ]
 }
diff --git a/tests/unit/utils/test_lines.py b/tests/unit/utils/test_lines.py
index 934f2ad666..9642b0f01c 100644
--- a/tests/unit/utils/test_lines.py
+++ b/tests/unit/utils/test_lines.py
@@ -101,6 +101,15 @@ def test_wrap_with_short_lines():
     assert lines.wrap(input, width=60) == expected
 
 
+def test_lines_which_have_2_spaces_following_period():
+    input = """Information related to the a standard versioned package.  This includes
+package info for APT, Yum, Zypper, and Googet package managers."""
+    expected = """Information related to the a standard versioned package.
+This includes package info for APT, Yum, Zypper, and Googet
+package managers."""
+    assert lines.wrap(input, width=60) == expected
+
+
 def test_list_each_item_in_list_has_new_line():
     input = """Type of weather:
 - Hail
@@ -177,3 +186,98 @@ def test_new_line_added_short_text_before_list():
 - Rain
 - Snow"""
     assert lines.wrap(input, width=60) == expected
+
+
+def test_new_line_preserved_short_text_before_list_without_colon():
+    input = """Today's forecast will have different weather.
+
+- A mix of hail and snow, followed by rain clouds, then finally clear sky
+- Rain
+- Snow"""
+    expected = """Today's forecast will have different weather.
+
+- A mix of hail and snow, followed by rain clouds, then
+  finally clear sky
+- Rain
+- Snow"""
+    assert lines.wrap(input, width=60) == expected
+
+
+def test_list_with_multiple_paragraphs():
+    input = """Lorem ipsum dolor sit amet, consectetur adipiscing elit. Donec porta euismod est a viverra. Integer vulputate ipsum id lacus tincidunt, id tincidunt tortor ullamcorper. Vestibulum facilisis at nulla nec lobortis. Nunc consectetur suscipit lacus id aliquam.
+
+Donec et urna aliquam, efficitur mauris et, consectetur enim. Aliquam aliquet turpis eget erat gravida condimentum. Sed vel feugiat risus.
+
+Sed interdum.
+
+Convallis turpis nec congue. Integer vulputate sed urna eu mollis. Mauris in congue nisi, sed pellentesque ex.
+
+- Ut vestibulum
+- consequat imperdiet
+- Integer rhoncus varius. Ante, ac tempus augue
+finibus sit amet. Integer ac fermentum neque, a sodales nibh. Mauris et dictum ipsum. Integer sit amet posuere urna. Nullam cursus molestie posuere. Praesent imperdiet cursus purus, in posuere odio.
+- Orci varius natoque penatibus et
+
+Aagnis dis parturient montes, nascetur ridiculus mus. Mauris mattis turpis quis hendrerit gravida. Curabitur nec diam erat. In nec est nisl. Quisque ut orci efficitur, vestibulum ante non, vestibulum erat. Donec mollis ultricies nisl."""
+    expected = """Lorem ipsum dolor sit amet, consectetur adipiscing elit.
+Donec porta euismod est a viverra. Integer vulputate ipsum
+id lacus tincidunt, id tincidunt tortor ullamcorper.
+Vestibulum facilisis at nulla nec lobortis. Nunc consectetur
+suscipit lacus id aliquam. Donec et urna aliquam, efficitur
+mauris et, consectetur enim. Aliquam aliquet turpis eget
+erat gravida condimentum. Sed vel feugiat risus.
+
+Sed interdum.
+
+Convallis turpis nec congue. Integer vulputate sed urna eu
+mollis. Mauris in congue nisi, sed pellentesque ex.
+
+- Ut vestibulum
+- consequat imperdiet
+- Integer rhoncus varius. Ante, ac tempus augue finibus sit
+  amet. Integer ac fermentum neque, a sodales nibh. Mauris
+  et dictum ipsum. Integer sit amet posuere urna. Nullam
+  cursus molestie posuere. Praesent imperdiet cursus purus,
+  in posuere odio.
+- Orci varius natoque penatibus et
+
+Aagnis dis parturient montes, nascetur ridiculus mus. Mauris
+mattis turpis quis hendrerit gravida. Curabitur nec diam
+erat. In nec est nisl. Quisque ut orci efficitur, vestibulum
+ante non, vestibulum erat. Donec mollis ultricies nisl."""
+    assert lines.wrap(input, width=60) == expected
+
+
+def test_list_with_numbered_list():
+    input = """Config for video classification human labeling task.
+Currently two types of video classification are supported:
+1. Assign labels on the entire video. Assign labels on the entire video.
+22. Split the video into multiple video clips based on camera shot, and
+assign labels on each video clip."""
+    expected = """Config for video classification human labeling task.
+Currently two types of video classification are supported:
+
+1. Assign labels on the entire video. Assign labels on the
+   entire video.
+22. Split the video into multiple video clips based on
+    camera shot, and assign labels on each video clip."""
+    assert lines.wrap(input, width=60) == expected
+
+
+def test_list_with_plus_list_item_marker():
+    input = """User-assigned name of the trigger. Must be unique within the project.
+Trigger names must meet the following requirements:
++ They must contain only alphanumeric characters and dashes.
++ They can be 1-64 characters long.
++ They must begin and end with an alphanumeric character."""
+    expected = """User-assigned name of the trigger. Must
+be unique within the project. Trigger
+names must meet the following
+requirements:
+
++ They must contain only alphanumeric
+  characters and dashes.
++ They can be 1-64 characters long.
++ They must begin and end with an
+  alphanumeric character."""
+    assert lines.wrap(input, width=40) == expected
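To see the wrapping behavior these fixtures pin down outside of pytest, here is a quick usage sketch. It assumes this repository's `gapic.utils.lines` module is importable, as it is in the test environment above, and reuses the `wrap(input, width=...)` signature the tests exercise:

```python
# Sketch: call lines.wrap directly with a plus-marker list, mirroring the
# fixtures above. Assumes this repo's gapic.utils.lines module is on the path.
from gapic.utils import lines

text = (
    "Trigger names must meet the following requirements:\n"
    "+ They must contain only alphanumeric characters and dashes.\n"
    "+ They can be 1-64 characters long."
)

# List items are preserved and their continuations get a hanging indent,
# as the expected strings in the tests above show.
print(lines.wrap(text, width=40))
```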