diff --git a/.coveragerc b/.coveragerc index 028e2ca..c090f54 100644 --- a/.coveragerc +++ b/.coveragerc @@ -10,8 +10,3 @@ exclude_lines = pragma: NO COVER # Ignore debug-only repr def __repr__ - # Ignore pkg_resources exceptions. - # This is added at the module level as a safeguard for if someone - # generates the code and tries to run it without pip installing. This - # makes it virtually impossible to test properly. - except pkg_resources.DistributionNotFound diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 3815c98..fccaa8e 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,4 +13,4 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:7a40313731a7cb1454eef6b33d3446ebb121836738dc3ab3d2d3ded5268c35b6 + digest: sha256:3bf87e47c2173d7eed42714589dc4da2c07c3268610f1e47f8e1a30decbfc7f1 diff --git a/.github/release-please.yml b/.github/release-please.yml index 6def37a..e9a4f00 100644 --- a/.github/release-please.yml +++ b/.github/release-please.yml @@ -1,5 +1,6 @@ releaseType: python handleGHRelease: true +manifest: true # NOTE: this section is generated by synthtool.languages.python # See https://github.com/googleapis/synthtool/blob/master/synthtool/languages/python.py branches: diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index 7092a13..e97d89e 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -12,7 +12,7 @@ jobs: - name: Setup Python uses: actions/setup-python@v4 with: - python-version: "3.10" + python-version: "3.9" - name: Install nox run: | python -m pip install --upgrade setuptools pip wheel @@ -28,7 +28,7 @@ jobs: - name: Setup Python uses: actions/setup-python@v4 with: - python-version: "3.10" + python-version: "3.9" - name: Install nox run: | python -m pip install --upgrade setuptools pip wheel diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index d2aee5b..16d5a9e 
100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -12,7 +12,7 @@ jobs: - name: Setup Python uses: actions/setup-python@v4 with: - python-version: "3.10" + python-version: "3.8" - name: Install nox run: | python -m pip install --upgrade setuptools pip wheel diff --git a/.github/workflows/unittest.yml b/.github/workflows/unittest.yml index 87ade4d..23000c0 100644 --- a/.github/workflows/unittest.yml +++ b/.github/workflows/unittest.yml @@ -41,7 +41,7 @@ jobs: - name: Setup Python uses: actions/setup-python@v4 with: - python-version: "3.10" + python-version: "3.8" - name: Install coverage run: | python -m pip install --upgrade setuptools pip wheel diff --git a/.kokoro/docker/docs/Dockerfile b/.kokoro/docker/docs/Dockerfile index 238b87b..f8137d0 100644 --- a/.kokoro/docker/docs/Dockerfile +++ b/.kokoro/docker/docs/Dockerfile @@ -60,16 +60,16 @@ RUN apt-get update \ && rm -rf /var/lib/apt/lists/* \ && rm -f /var/cache/apt/archives/*.deb -###################### Install python 3.8.11 +###################### Install python 3.9.13 -# Download python 3.8.11 -RUN wget https://www.python.org/ftp/python/3.8.11/Python-3.8.11.tgz +# Download python 3.9.13 +RUN wget https://www.python.org/ftp/python/3.9.13/Python-3.9.13.tgz # Extract files -RUN tar -xvf Python-3.8.11.tgz +RUN tar -xvf Python-3.9.13.tgz -# Install python 3.8.11 -RUN ./Python-3.8.11/configure --enable-optimizations +# Install python 3.9.13 +RUN ./Python-3.9.13/configure --enable-optimizations RUN make altinstall ###################### Install pip diff --git a/.kokoro/requirements.in b/.kokoro/requirements.in index 7718391..cbd7e77 100644 --- a/.kokoro/requirements.in +++ b/.kokoro/requirements.in @@ -5,4 +5,6 @@ typing-extensions twine wheel setuptools -nox \ No newline at end of file +nox +charset-normalizer<3 +click<8.1.0 diff --git a/.kokoro/requirements.txt b/.kokoro/requirements.txt index d15994b..05dc467 100644 --- a/.kokoro/requirements.txt +++ b/.kokoro/requirements.txt @@ -20,9 
+20,9 @@ cachetools==5.2.0 \ --hash=sha256:6a94c6402995a99c3970cc7e4884bb60b4a8639938157eeed436098bf9831757 \ --hash=sha256:f9f17d2aec496a9aa6b76f53e3b614c965223c061982d434d160f930c698a9db # via google-auth -certifi==2022.6.15 \ - --hash=sha256:84c85a9078b11105f04f3036a9482ae10e4621616db313fe045dd24743a0820d \ - --hash=sha256:fe86415d55e84719d75f8b69414f6438ac3547d2078ab91b67e779ef69378412 +certifi==2022.12.7 \ + --hash=sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3 \ + --hash=sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18 # via requests cffi==1.15.1 \ --hash=sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5 \ @@ -93,11 +93,14 @@ cffi==1.15.1 \ charset-normalizer==2.1.1 \ --hash=sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845 \ --hash=sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f - # via requests + # via + # -r requirements.in + # requests click==8.0.4 \ --hash=sha256:6a7a62563bbfabfda3a38f3023a1db4a35978c0abd76f6c9605ecd6554d6d9b1 \ --hash=sha256:8458d7b1287c5fb128c90e23381cf99dcde74beaf6c7ff6384ce84d6fe090adb # via + # -r requirements.in # gcp-docuploader # gcp-releasetool colorlog==6.7.0 \ @@ -110,29 +113,33 @@ commonmark==0.9.1 \ --hash=sha256:452f9dc859be7f06631ddcb328b6919c67984aca654e5fefb3914d54691aed60 \ --hash=sha256:da2f38c92590f83de410ba1a3cbceafbc74fee9def35f9251ba9a971d6d66fd9 # via rich -cryptography==37.0.4 \ - --hash=sha256:190f82f3e87033821828f60787cfa42bff98404483577b591429ed99bed39d59 \ - --hash=sha256:2be53f9f5505673eeda5f2736bea736c40f051a739bfae2f92d18aed1eb54596 \ - --hash=sha256:30788e070800fec9bbcf9faa71ea6d8068f5136f60029759fd8c3efec3c9dcb3 \ - --hash=sha256:3d41b965b3380f10e4611dbae366f6dc3cefc7c9ac4e8842a806b9672ae9add5 \ - --hash=sha256:4c590ec31550a724ef893c50f9a97a0c14e9c851c85621c5650d699a7b88f7ab \ - --hash=sha256:549153378611c0cca1042f20fd9c5030d37a72f634c9326e225c9f666d472884 \ - 
--hash=sha256:63f9c17c0e2474ccbebc9302ce2f07b55b3b3fcb211ded18a42d5764f5c10a82 \ - --hash=sha256:6bc95ed67b6741b2607298f9ea4932ff157e570ef456ef7ff0ef4884a134cc4b \ - --hash=sha256:7099a8d55cd49b737ffc99c17de504f2257e3787e02abe6d1a6d136574873441 \ - --hash=sha256:75976c217f10d48a8b5a8de3d70c454c249e4b91851f6838a4e48b8f41eb71aa \ - --hash=sha256:7bc997818309f56c0038a33b8da5c0bfbb3f1f067f315f9abd6fc07ad359398d \ - --hash=sha256:80f49023dd13ba35f7c34072fa17f604d2f19bf0989f292cedf7ab5770b87a0b \ - --hash=sha256:91ce48d35f4e3d3f1d83e29ef4a9267246e6a3be51864a5b7d2247d5086fa99a \ - --hash=sha256:a958c52505c8adf0d3822703078580d2c0456dd1d27fabfb6f76fe63d2971cd6 \ - --hash=sha256:b62439d7cd1222f3da897e9a9fe53bbf5c104fff4d60893ad1355d4c14a24157 \ - --hash=sha256:b7f8dd0d4c1f21759695c05a5ec8536c12f31611541f8904083f3dc582604280 \ - --hash=sha256:d204833f3c8a33bbe11eda63a54b1aad7aa7456ed769a982f21ec599ba5fa282 \ - --hash=sha256:e007f052ed10cc316df59bc90fbb7ff7950d7e2919c9757fd42a2b8ecf8a5f67 \ - --hash=sha256:f2dcb0b3b63afb6df7fd94ec6fbddac81b5492513f7b0436210d390c14d46ee8 \ - --hash=sha256:f721d1885ecae9078c3f6bbe8a88bc0786b6e749bf32ccec1ef2b18929a05046 \ - --hash=sha256:f7a6de3e98771e183645181b3627e2563dcde3ce94a9e42a3f427d2255190327 \ - --hash=sha256:f8c0a6e9e1dd3eb0414ba320f85da6b0dcbd543126e30fcc546e7372a7fbf3b9 +cryptography==38.0.3 \ + --hash=sha256:068147f32fa662c81aebab95c74679b401b12b57494872886eb5c1139250ec5d \ + --hash=sha256:06fc3cc7b6f6cca87bd56ec80a580c88f1da5306f505876a71c8cfa7050257dd \ + --hash=sha256:25c1d1f19729fb09d42e06b4bf9895212292cb27bb50229f5aa64d039ab29146 \ + --hash=sha256:402852a0aea73833d982cabb6d0c3bb582c15483d29fb7085ef2c42bfa7e38d7 \ + --hash=sha256:4e269dcd9b102c5a3d72be3c45d8ce20377b8076a43cbed6f660a1afe365e436 \ + --hash=sha256:5419a127426084933076132d317911e3c6eb77568a1ce23c3ac1e12d111e61e0 \ + --hash=sha256:554bec92ee7d1e9d10ded2f7e92a5d70c1f74ba9524947c0ba0c850c7b011828 \ + 
--hash=sha256:5e89468fbd2fcd733b5899333bc54d0d06c80e04cd23d8c6f3e0542358c6060b \ + --hash=sha256:65535bc550b70bd6271984d9863a37741352b4aad6fb1b3344a54e6950249b55 \ + --hash=sha256:6ab9516b85bebe7aa83f309bacc5f44a61eeb90d0b4ec125d2d003ce41932d36 \ + --hash=sha256:6addc3b6d593cd980989261dc1cce38263c76954d758c3c94de51f1e010c9a50 \ + --hash=sha256:728f2694fa743a996d7784a6194da430f197d5c58e2f4e278612b359f455e4a2 \ + --hash=sha256:785e4056b5a8b28f05a533fab69febf5004458e20dad7e2e13a3120d8ecec75a \ + --hash=sha256:78cf5eefac2b52c10398a42765bfa981ce2372cbc0457e6bf9658f41ec3c41d8 \ + --hash=sha256:7f836217000342d448e1c9a342e9163149e45d5b5eca76a30e84503a5a96cab0 \ + --hash=sha256:8d41a46251bf0634e21fac50ffd643216ccecfaf3701a063257fe0b2be1b6548 \ + --hash=sha256:984fe150f350a3c91e84de405fe49e688aa6092b3525f407a18b9646f6612320 \ + --hash=sha256:9b24bcff7853ed18a63cfb0c2b008936a9554af24af2fb146e16d8e1aed75748 \ + --hash=sha256:b1b35d9d3a65542ed2e9d90115dfd16bbc027b3f07ee3304fc83580f26e43249 \ + --hash=sha256:b1b52c9e5f8aa2b802d48bd693190341fae201ea51c7a167d69fc48b60e8a959 \ + --hash=sha256:bbf203f1a814007ce24bd4d51362991d5cb90ba0c177a9c08825f2cc304d871f \ + --hash=sha256:be243c7e2bfcf6cc4cb350c0d5cdf15ca6383bbcb2a8ef51d3c9411a9d4386f0 \ + --hash=sha256:bfbe6ee19615b07a98b1d2287d6a6073f734735b49ee45b11324d85efc4d5cbd \ + --hash=sha256:c46837ea467ed1efea562bbeb543994c2d1f6e800785bd5a2c98bc096f5cb220 \ + --hash=sha256:dfb4f4dd568de1b6af9f4cda334adf7d72cf5bc052516e1b2608b683375dd95c \ + --hash=sha256:ed7b00096790213e09eb11c97cc6e2b757f15f3d2f85833cd2d3ec3fe37c1722 # via # gcp-releasetool # secretstorage @@ -148,23 +155,23 @@ filelock==3.8.0 \ --hash=sha256:55447caa666f2198c5b6b13a26d2084d26fa5b115c00d065664b2124680c4edc \ --hash=sha256:617eb4e5eedc82fc5f47b6d61e4d11cb837c56cb4544e39081099fa17ad109d4 # via virtualenv -gcp-docuploader==0.6.3 \ - --hash=sha256:ba8c9d76b3bbac54b0311c503a373b00edc2dc02d6d54ea9507045adb8e870f7 \ - 
--hash=sha256:c0f5aaa82ce1854a386197e4e359b120ad6d4e57ae2c812fce42219a3288026b +gcp-docuploader==0.6.4 \ + --hash=sha256:01486419e24633af78fd0167db74a2763974765ee8078ca6eb6964d0ebd388af \ + --hash=sha256:70861190c123d907b3b067da896265ead2eeb9263969d6955c9e0bb091b5ccbf # via -r requirements.in -gcp-releasetool==1.8.7 \ - --hash=sha256:3d2a67c9db39322194afb3b427e9cb0476ce8f2a04033695f0aeb63979fc2b37 \ - --hash=sha256:5e4d28f66e90780d77f3ecf1e9155852b0c3b13cbccb08ab07e66b2357c8da8d +gcp-releasetool==1.10.0 \ + --hash=sha256:72a38ca91b59c24f7e699e9227c90cbe4dd71b789383cb0164b088abae294c83 \ + --hash=sha256:8c7c99320208383d4bb2b808c6880eb7a81424afe7cdba3c8d84b25f4f0e097d # via -r requirements.in -google-api-core==2.8.2 \ - --hash=sha256:06f7244c640322b508b125903bb5701bebabce8832f85aba9335ec00b3d02edc \ - --hash=sha256:93c6a91ccac79079ac6bbf8b74ee75db970cc899278b97d53bc012f35908cf50 +google-api-core==2.10.2 \ + --hash=sha256:10c06f7739fe57781f87523375e8e1a3a4674bf6392cd6131a3222182b971320 \ + --hash=sha256:34f24bd1d5f72a8c4519773d99ca6bf080a6c4e041b4e9f024fe230191dda62e # via # google-cloud-core # google-cloud-storage -google-auth==2.11.0 \ - --hash=sha256:be62acaae38d0049c21ca90f27a23847245c9f161ff54ede13af2cb6afecbac9 \ - --hash=sha256:ed65ecf9f681832298e29328e1ef0a3676e3732b2e56f41532d45f70a22de0fb +google-auth==2.14.1 \ + --hash=sha256:ccaa901f31ad5cbb562615eb8b664b3dd0bf5404a67618e642307f00613eda4d \ + --hash=sha256:f5d8701633bebc12e0deea4df8abd8aff31c28b355360597f7f2ee60f2e4d016 # via # gcp-releasetool # google-api-core @@ -174,76 +181,102 @@ google-cloud-core==2.3.2 \ --hash=sha256:8417acf6466be2fa85123441696c4badda48db314c607cf1e5d543fa8bdc22fe \ --hash=sha256:b9529ee7047fd8d4bf4a2182de619154240df17fbe60ead399078c1ae152af9a # via google-cloud-storage -google-cloud-storage==2.5.0 \ - --hash=sha256:19a26c66c317ce542cea0830b7e787e8dac2588b6bfa4d3fd3b871ba16305ab0 \ - --hash=sha256:382f34b91de2212e3c2e7b40ec079d27ee2e3dbbae99b75b1bcd8c63063ce235 
+google-cloud-storage==2.6.0 \ + --hash=sha256:104ca28ae61243b637f2f01455cc8a05e8f15a2a18ced96cb587241cdd3820f5 \ + --hash=sha256:4ad0415ff61abdd8bb2ae81c1f8f7ec7d91a1011613f2db87c614c550f97bfe9 # via gcp-docuploader -google-crc32c==1.3.0 \ - --hash=sha256:04e7c220798a72fd0f08242bc8d7a05986b2a08a0573396187fd32c1dcdd58b3 \ - --hash=sha256:05340b60bf05b574159e9bd940152a47d38af3fb43803ffe71f11d704b7696a6 \ - --hash=sha256:12674a4c3b56b706153a358eaa1018c4137a5a04635b92b4652440d3d7386206 \ - --hash=sha256:127f9cc3ac41b6a859bd9dc4321097b1a4f6aa7fdf71b4f9227b9e3ebffb4422 \ - --hash=sha256:13af315c3a0eec8bb8b8d80b8b128cb3fcd17d7e4edafc39647846345a3f003a \ - --hash=sha256:1926fd8de0acb9d15ee757175ce7242e235482a783cd4ec711cc999fc103c24e \ - --hash=sha256:226f2f9b8e128a6ca6a9af9b9e8384f7b53a801907425c9a292553a3a7218ce0 \ - --hash=sha256:276de6273eb074a35bc598f8efbc00c7869c5cf2e29c90748fccc8c898c244df \ - --hash=sha256:318f73f5484b5671f0c7f5f63741ab020a599504ed81d209b5c7129ee4667407 \ - --hash=sha256:3bbce1be3687bbfebe29abdb7631b83e6b25da3f4e1856a1611eb21854b689ea \ - --hash=sha256:42ae4781333e331a1743445931b08ebdad73e188fd554259e772556fc4937c48 \ - --hash=sha256:58be56ae0529c664cc04a9c76e68bb92b091e0194d6e3c50bea7e0f266f73713 \ - --hash=sha256:5da2c81575cc3ccf05d9830f9e8d3c70954819ca9a63828210498c0774fda1a3 \ - --hash=sha256:6311853aa2bba4064d0c28ca54e7b50c4d48e3de04f6770f6c60ebda1e975267 \ - --hash=sha256:650e2917660e696041ab3dcd7abac160b4121cd9a484c08406f24c5964099829 \ - --hash=sha256:6a4db36f9721fdf391646685ecffa404eb986cbe007a3289499020daf72e88a2 \ - --hash=sha256:779cbf1ce375b96111db98fca913c1f5ec11b1d870e529b1dc7354b2681a8c3a \ - --hash=sha256:7f6fe42536d9dcd3e2ffb9d3053f5d05221ae3bbcefbe472bdf2c71c793e3183 \ - --hash=sha256:891f712ce54e0d631370e1f4997b3f182f3368179198efc30d477c75d1f44942 \ - --hash=sha256:95c68a4b9b7828ba0428f8f7e3109c5d476ca44996ed9a5f8aac6269296e2d59 \ - --hash=sha256:96a8918a78d5d64e07c8ea4ed2bc44354e3f93f46a4866a40e8db934e4c0d74b \ - 
--hash=sha256:9c3cf890c3c0ecfe1510a452a165431b5831e24160c5fcf2071f0f85ca5a47cd \ - --hash=sha256:9f58099ad7affc0754ae42e6d87443299f15d739b0ce03c76f515153a5cda06c \ - --hash=sha256:a0b9e622c3b2b8d0ce32f77eba617ab0d6768b82836391e4f8f9e2074582bf02 \ - --hash=sha256:a7f9cbea4245ee36190f85fe1814e2d7b1e5f2186381b082f5d59f99b7f11328 \ - --hash=sha256:bab4aebd525218bab4ee615786c4581952eadc16b1ff031813a2fd51f0cc7b08 \ - --hash=sha256:c124b8c8779bf2d35d9b721e52d4adb41c9bfbde45e6a3f25f0820caa9aba73f \ - --hash=sha256:c9da0a39b53d2fab3e5467329ed50e951eb91386e9d0d5b12daf593973c3b168 \ - --hash=sha256:ca60076c388728d3b6ac3846842474f4250c91efbfe5afa872d3ffd69dd4b318 \ - --hash=sha256:cb6994fff247987c66a8a4e550ef374671c2b82e3c0d2115e689d21e511a652d \ - --hash=sha256:d1c1d6236feab51200272d79b3d3e0f12cf2cbb12b208c835b175a21efdb0a73 \ - --hash=sha256:dd7760a88a8d3d705ff562aa93f8445ead54f58fd482e4f9e2bafb7e177375d4 \ - --hash=sha256:dda4d8a3bb0b50f540f6ff4b6033f3a74e8bf0bd5320b70fab2c03e512a62812 \ - --hash=sha256:e0f1ff55dde0ebcfbef027edc21f71c205845585fffe30d4ec4979416613e9b3 \ - --hash=sha256:e7a539b9be7b9c00f11ef16b55486141bc2cdb0c54762f84e3c6fc091917436d \ - --hash=sha256:eb0b14523758e37802f27b7f8cd973f5f3d33be7613952c0df904b68c4842f0e \ - --hash=sha256:ed447680ff21c14aaceb6a9f99a5f639f583ccfe4ce1a5e1d48eb41c3d6b3217 \ - --hash=sha256:f52a4ad2568314ee713715b1e2d79ab55fab11e8b304fd1462ff5cccf4264b3e \ - --hash=sha256:fbd60c6aaa07c31d7754edbc2334aef50601b7f1ada67a96eb1eb57c7c72378f \ - --hash=sha256:fc28e0db232c62ca0c3600884933178f0825c99be4474cdd645e378a10588125 \ - --hash=sha256:fe31de3002e7b08eb20823b3735b97c86c5926dd0581c7710a680b418a8709d4 \ - --hash=sha256:fec221a051150eeddfdfcff162e6db92c65ecf46cb0f7bb1bf812a1520ec026b \ - --hash=sha256:ff71073ebf0e42258a42a0b34f2c09ec384977e7f6808999102eedd5b49920e3 +google-crc32c==1.5.0 \ + --hash=sha256:024894d9d3cfbc5943f8f230e23950cd4906b2fe004c72e29b209420a1e6b05a \ + 
--hash=sha256:02c65b9817512edc6a4ae7c7e987fea799d2e0ee40c53ec573a692bee24de876 \ + --hash=sha256:02ebb8bf46c13e36998aeaad1de9b48f4caf545e91d14041270d9dca767b780c \ + --hash=sha256:07eb3c611ce363c51a933bf6bd7f8e3878a51d124acfc89452a75120bc436289 \ + --hash=sha256:1034d91442ead5a95b5aaef90dbfaca8633b0247d1e41621d1e9f9db88c36298 \ + --hash=sha256:116a7c3c616dd14a3de8c64a965828b197e5f2d121fedd2f8c5585c547e87b02 \ + --hash=sha256:19e0a019d2c4dcc5e598cd4a4bc7b008546b0358bd322537c74ad47a5386884f \ + --hash=sha256:1c7abdac90433b09bad6c43a43af253e688c9cfc1c86d332aed13f9a7c7f65e2 \ + --hash=sha256:1e986b206dae4476f41bcec1faa057851f3889503a70e1bdb2378d406223994a \ + --hash=sha256:272d3892a1e1a2dbc39cc5cde96834c236d5327e2122d3aaa19f6614531bb6eb \ + --hash=sha256:278d2ed7c16cfc075c91378c4f47924c0625f5fc84b2d50d921b18b7975bd210 \ + --hash=sha256:2ad40e31093a4af319dadf503b2467ccdc8f67c72e4bcba97f8c10cb078207b5 \ + --hash=sha256:2e920d506ec85eb4ba50cd4228c2bec05642894d4c73c59b3a2fe20346bd00ee \ + --hash=sha256:3359fc442a743e870f4588fcf5dcbc1bf929df1fad8fb9905cd94e5edb02e84c \ + --hash=sha256:37933ec6e693e51a5b07505bd05de57eee12f3e8c32b07da7e73669398e6630a \ + --hash=sha256:398af5e3ba9cf768787eef45c803ff9614cc3e22a5b2f7d7ae116df8b11e3314 \ + --hash=sha256:3b747a674c20a67343cb61d43fdd9207ce5da6a99f629c6e2541aa0e89215bcd \ + --hash=sha256:461665ff58895f508e2866824a47bdee72497b091c730071f2b7575d5762ab65 \ + --hash=sha256:4c6fdd4fccbec90cc8a01fc00773fcd5fa28db683c116ee3cb35cd5da9ef6c37 \ + --hash=sha256:5829b792bf5822fd0a6f6eb34c5f81dd074f01d570ed7f36aa101d6fc7a0a6e4 \ + --hash=sha256:596d1f98fc70232fcb6590c439f43b350cb762fb5d61ce7b0e9db4539654cc13 \ + --hash=sha256:5ae44e10a8e3407dbe138984f21e536583f2bba1be9491239f942c2464ac0894 \ + --hash=sha256:635f5d4dd18758a1fbd1049a8e8d2fee4ffed124462d837d1a02a0e009c3ab31 \ + --hash=sha256:64e52e2b3970bd891309c113b54cf0e4384762c934d5ae56e283f9a0afcd953e \ + --hash=sha256:66741ef4ee08ea0b2cc3c86916ab66b6aef03768525627fd6a1b34968b4e3709 \ + 
--hash=sha256:67b741654b851abafb7bc625b6d1cdd520a379074e64b6a128e3b688c3c04740 \ + --hash=sha256:6ac08d24c1f16bd2bf5eca8eaf8304812f44af5cfe5062006ec676e7e1d50afc \ + --hash=sha256:6f998db4e71b645350b9ac28a2167e6632c239963ca9da411523bb439c5c514d \ + --hash=sha256:72218785ce41b9cfd2fc1d6a017dc1ff7acfc4c17d01053265c41a2c0cc39b8c \ + --hash=sha256:74dea7751d98034887dbd821b7aae3e1d36eda111d6ca36c206c44478035709c \ + --hash=sha256:759ce4851a4bb15ecabae28f4d2e18983c244eddd767f560165563bf9aefbc8d \ + --hash=sha256:77e2fd3057c9d78e225fa0a2160f96b64a824de17840351b26825b0848022906 \ + --hash=sha256:7c074fece789b5034b9b1404a1f8208fc2d4c6ce9decdd16e8220c5a793e6f61 \ + --hash=sha256:7c42c70cd1d362284289c6273adda4c6af8039a8ae12dc451dcd61cdabb8ab57 \ + --hash=sha256:7f57f14606cd1dd0f0de396e1e53824c371e9544a822648cd76c034d209b559c \ + --hash=sha256:83c681c526a3439b5cf94f7420471705bbf96262f49a6fe546a6db5f687a3d4a \ + --hash=sha256:8485b340a6a9e76c62a7dce3c98e5f102c9219f4cfbf896a00cf48caf078d438 \ + --hash=sha256:84e6e8cd997930fc66d5bb4fde61e2b62ba19d62b7abd7a69920406f9ecca946 \ + --hash=sha256:89284716bc6a5a415d4eaa11b1726d2d60a0cd12aadf5439828353662ede9dd7 \ + --hash=sha256:8b87e1a59c38f275c0e3676fc2ab6d59eccecfd460be267ac360cc31f7bcde96 \ + --hash=sha256:8f24ed114432de109aa9fd317278518a5af2d31ac2ea6b952b2f7782b43da091 \ + --hash=sha256:98cb4d057f285bd80d8778ebc4fde6b4d509ac3f331758fb1528b733215443ae \ + --hash=sha256:998679bf62b7fb599d2878aa3ed06b9ce688b8974893e7223c60db155f26bd8d \ + --hash=sha256:9ba053c5f50430a3fcfd36f75aff9caeba0440b2d076afdb79a318d6ca245f88 \ + --hash=sha256:9c99616c853bb585301df6de07ca2cadad344fd1ada6d62bb30aec05219c45d2 \ + --hash=sha256:a1fd716e7a01f8e717490fbe2e431d2905ab8aa598b9b12f8d10abebb36b04dd \ + --hash=sha256:a2355cba1f4ad8b6988a4ca3feed5bff33f6af2d7f134852cf279c2aebfde541 \ + --hash=sha256:b1f8133c9a275df5613a451e73f36c2aea4fe13c5c8997e22cf355ebd7bd0728 \ + --hash=sha256:b8667b48e7a7ef66afba2c81e1094ef526388d35b873966d8a9a447974ed9178 \ + 
--hash=sha256:ba1eb1843304b1e5537e1fca632fa894d6f6deca8d6389636ee5b4797affb968 \ + --hash=sha256:be82c3c8cfb15b30f36768797a640e800513793d6ae1724aaaafe5bf86f8f346 \ + --hash=sha256:c02ec1c5856179f171e032a31d6f8bf84e5a75c45c33b2e20a3de353b266ebd8 \ + --hash=sha256:c672d99a345849301784604bfeaeba4db0c7aae50b95be04dd651fd2a7310b93 \ + --hash=sha256:c6c777a480337ac14f38564ac88ae82d4cd238bf293f0a22295b66eb89ffced7 \ + --hash=sha256:cae0274952c079886567f3f4f685bcaf5708f0a23a5f5216fdab71f81a6c0273 \ + --hash=sha256:cd67cf24a553339d5062eff51013780a00d6f97a39ca062781d06b3a73b15462 \ + --hash=sha256:d3515f198eaa2f0ed49f8819d5732d70698c3fa37384146079b3799b97667a94 \ + --hash=sha256:d5280312b9af0976231f9e317c20e4a61cd2f9629b7bfea6a693d1878a264ebd \ + --hash=sha256:de06adc872bcd8c2a4e0dc51250e9e65ef2ca91be023b9d13ebd67c2ba552e1e \ + --hash=sha256:e1674e4307fa3024fc897ca774e9c7562c957af85df55efe2988ed9056dc4e57 \ + --hash=sha256:e2096eddb4e7c7bdae4bd69ad364e55e07b8316653234a56552d9c988bd2d61b \ + --hash=sha256:e560628513ed34759456a416bf86b54b2476c59144a9138165c9a1575801d0d9 \ + --hash=sha256:edfedb64740750e1a3b16152620220f51d58ff1b4abceb339ca92e934775c27a \ + --hash=sha256:f13cae8cc389a440def0c8c52057f37359014ccbc9dc1f0827936bcd367c6100 \ + --hash=sha256:f314013e7dcd5cf45ab1945d92e713eec788166262ae8deb2cfacd53def27325 \ + --hash=sha256:f583edb943cf2e09c60441b910d6a20b4d9d626c75a36c8fcac01a6c96c01183 \ + --hash=sha256:fd8536e902db7e365f49e7d9029283403974ccf29b13fc7028b97e2295b33556 \ + --hash=sha256:fe70e325aa68fa4b5edf7d1a4b6f691eb04bbccac0ace68e34820d283b5f80d4 # via google-resumable-media -google-resumable-media==2.3.3 \ - --hash=sha256:27c52620bd364d1c8116eaac4ea2afcbfb81ae9139fb3199652fcac1724bfb6c \ - --hash=sha256:5b52774ea7a829a8cdaa8bd2d4c3d4bc660c91b30857ab2668d0eb830f4ea8c5 +google-resumable-media==2.4.0 \ + --hash=sha256:2aa004c16d295c8f6c33b2b4788ba59d366677c0a25ae7382436cb30f776deaa \ + --hash=sha256:8d5518502f92b9ecc84ac46779bd4f09694ecb3ba38a3e7ca737a86d15cbca1f # 
via google-cloud-storage -googleapis-common-protos==1.56.4 \ - --hash=sha256:8eb2cbc91b69feaf23e32452a7ae60e791e09967d81d4fcc7fc388182d1bd394 \ - --hash=sha256:c25873c47279387cfdcbdafa36149887901d36202cb645a0e4f29686bf6e4417 +googleapis-common-protos==1.57.0 \ + --hash=sha256:27a849d6205838fb6cc3c1c21cb9800707a661bb21c6ce7fb13e99eb1f8a0c46 \ + --hash=sha256:a9f4a1d7f6d9809657b7f1316a1aa527f6664891531bcfcc13b6696e685f443c # via google-api-core -idna==3.3 \ - --hash=sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff \ - --hash=sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d +idna==3.4 \ + --hash=sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4 \ + --hash=sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2 # via requests -importlib-metadata==4.12.0 \ - --hash=sha256:637245b8bab2b6502fcbc752cc4b7a6f6243bb02b31c5c26156ad103d3d45670 \ - --hash=sha256:7401a975809ea1fdc658c3aa4f78cc2195a0e019c5cbc4c06122884e9ae80c23 +importlib-metadata==5.0.0 \ + --hash=sha256:da31db32b304314d044d3c12c79bd59e307889b287ad12ff387b3500835fc2ab \ + --hash=sha256:ddb0e35065e8938f867ed4928d0ae5bf2a53b7773871bfe6bcc7e4fcdc7dea43 # via # -r requirements.in + # keyring # twine -jaraco-classes==3.2.2 \ - --hash=sha256:6745f113b0b588239ceb49532aa09c3ebb947433ce311ef2f8e3ad64ebb74594 \ - --hash=sha256:e6ef6fd3fcf4579a7a019d87d1e56a883f4e4c35cfe925f86731abc58804e647 +jaraco-classes==3.2.3 \ + --hash=sha256:2353de3288bc6b82120752201c6b1c1a14b058267fa424ed5ce5984e3b922158 \ + --hash=sha256:89559fa5c1d3c34eff6f631ad80bb21f378dbcbb35dd161fd2c6b93f5be2f98a # via keyring jeepney==0.8.0 \ --hash=sha256:5efe48d255973902f6badc3ce55e2aa6c5c3b3bc642059ef3a91247bcfcc5806 \ @@ -255,9 +288,9 @@ jinja2==3.1.2 \ --hash=sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852 \ --hash=sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61 # via gcp-releasetool -keyring==23.9.0 \ - 
--hash=sha256:4c32a31174faaee48f43a7e2c7e9c3216ec5e95acf22a2bebfb4a1d05056ee44 \ - --hash=sha256:98f060ec95ada2ab910c195a2d4317be6ef87936a766b239c46aa3c7aac4f0db +keyring==23.11.0 \ + --hash=sha256:3dd30011d555f1345dec2c262f0153f2f0ca6bca041fb1dc4588349bb4c0ac1e \ + --hash=sha256:ad192263e2cdd5f12875dedc2da13534359a7e760e77f8d04b50968a821c2361 # via # gcp-releasetool # twine @@ -303,9 +336,9 @@ markupsafe==2.1.1 \ --hash=sha256:f121a1420d4e173a5d96e47e9a0c0dcff965afdf1626d28de1460815f7c4ee7a \ --hash=sha256:fc7b548b17d238737688817ab67deebb30e8073c95749d55538ed473130ec0c7 # via jinja2 -more-itertools==8.14.0 \ - --hash=sha256:1bc4f91ee5b1b31ac7ceacc17c09befe6a40a503907baf9c839c229b5095cfd2 \ - --hash=sha256:c09443cd3d5438b8dafccd867a6bc1cb0894389e90cb53d227456b0b0bccb750 +more-itertools==9.0.0 \ + --hash=sha256:250e83d7e81d0c87ca6bd942e6aeab8cc9daa6096d12c5308f3f92fa5e5c1f41 \ + --hash=sha256:5a6257e40878ef0520b1803990e3e22303a41b5714006c32a3fd8304b26ea1ab # via jaraco-classes nox==2022.8.7 \ --hash=sha256:1b894940551dc5c389f9271d197ca5d655d40bdc6ccf93ed6880e4042760a34b \ @@ -321,34 +354,33 @@ pkginfo==1.8.3 \ --hash=sha256:848865108ec99d4901b2f7e84058b6e7660aae8ae10164e015a6dcf5b242a594 \ --hash=sha256:a84da4318dd86f870a9447a8c98340aa06216bfc6f2b7bdc4b8766984ae1867c # via twine -platformdirs==2.5.2 \ - --hash=sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788 \ - --hash=sha256:58c8abb07dcb441e6ee4b11d8df0ac856038f944ab98b7be6b27b2a3c7feef19 +platformdirs==2.5.4 \ + --hash=sha256:1006647646d80f16130f052404c6b901e80ee4ed6bef6792e1f238a8969106f7 \ + --hash=sha256:af0276409f9a02373d540bf8480021a048711d572745aef4b7842dad245eba10 # via virtualenv -protobuf==3.20.2 \ - --hash=sha256:03d76b7bd42ac4a6e109742a4edf81ffe26ffd87c5993126d894fe48a120396a \ - --hash=sha256:09e25909c4297d71d97612f04f41cea8fa8510096864f2835ad2f3b3df5a5559 \ - --hash=sha256:18e34a10ae10d458b027d7638a599c964b030c1739ebd035a1dfc0e22baa3bfe \ - 
--hash=sha256:291fb4307094bf5ccc29f424b42268640e00d5240bf0d9b86bf3079f7576474d \ - --hash=sha256:2c0b040d0b5d5d207936ca2d02f00f765906622c07d3fa19c23a16a8ca71873f \ - --hash=sha256:384164994727f274cc34b8abd41a9e7e0562801361ee77437099ff6dfedd024b \ - --hash=sha256:3cb608e5a0eb61b8e00fe641d9f0282cd0eedb603be372f91f163cbfbca0ded0 \ - --hash=sha256:5d9402bf27d11e37801d1743eada54372f986a372ec9679673bfcc5c60441151 \ - --hash=sha256:712dca319eee507a1e7df3591e639a2b112a2f4a62d40fe7832a16fd19151750 \ - --hash=sha256:7a5037af4e76c975b88c3becdf53922b5ffa3f2cddf657574a4920a3b33b80f3 \ - --hash=sha256:8228e56a865c27163d5d1d1771d94b98194aa6917bcfb6ce139cbfa8e3c27334 \ - --hash=sha256:84a1544252a933ef07bb0b5ef13afe7c36232a774affa673fc3636f7cee1db6c \ - --hash=sha256:84fe5953b18a383fd4495d375fe16e1e55e0a3afe7b4f7b4d01a3a0649fcda9d \ - --hash=sha256:9c673c8bfdf52f903081816b9e0e612186684f4eb4c17eeb729133022d6032e3 \ - --hash=sha256:9f876a69ca55aed879b43c295a328970306e8e80a263ec91cf6e9189243c613b \ - --hash=sha256:a9e5ae5a8e8985c67e8944c23035a0dff2c26b0f5070b2f55b217a1c33bbe8b1 \ - --hash=sha256:b4fdb29c5a7406e3f7ef176b2a7079baa68b5b854f364c21abe327bbeec01cdb \ - --hash=sha256:c184485e0dfba4dfd451c3bd348c2e685d6523543a0f91b9fd4ae90eb09e8422 \ - --hash=sha256:c9cdf251c582c16fd6a9f5e95836c90828d51b0069ad22f463761d27c6c19019 \ - --hash=sha256:e39cf61bb8582bda88cdfebc0db163b774e7e03364bbf9ce1ead13863e81e359 \ - --hash=sha256:e8fbc522303e09036c752a0afcc5c0603e917222d8bedc02813fd73b4b4ed804 \ - --hash=sha256:f34464ab1207114e73bba0794d1257c150a2b89b7a9faf504e00af7c9fd58978 \ - --hash=sha256:f52dabc96ca99ebd2169dadbe018824ebda08a795c7684a0b7d203a290f3adb0 +protobuf==3.20.3 \ + --hash=sha256:03038ac1cfbc41aa21f6afcbcd357281d7521b4157926f30ebecc8d4ea59dcb7 \ + --hash=sha256:28545383d61f55b57cf4df63eebd9827754fd2dc25f80c5253f9184235db242c \ + --hash=sha256:2e3427429c9cffebf259491be0af70189607f365c2f41c7c3764af6f337105f2 \ + 
--hash=sha256:398a9e0c3eaceb34ec1aee71894ca3299605fa8e761544934378bbc6c97de23b \ + --hash=sha256:44246bab5dd4b7fbd3c0c80b6f16686808fab0e4aca819ade6e8d294a29c7050 \ + --hash=sha256:447d43819997825d4e71bf5769d869b968ce96848b6479397e29fc24c4a5dfe9 \ + --hash=sha256:67a3598f0a2dcbc58d02dd1928544e7d88f764b47d4a286202913f0b2801c2e7 \ + --hash=sha256:74480f79a023f90dc6e18febbf7b8bac7508420f2006fabd512013c0c238f454 \ + --hash=sha256:819559cafa1a373b7096a482b504ae8a857c89593cf3a25af743ac9ecbd23480 \ + --hash=sha256:899dc660cd599d7352d6f10d83c95df430a38b410c1b66b407a6b29265d66469 \ + --hash=sha256:8c0c984a1b8fef4086329ff8dd19ac77576b384079247c770f29cc8ce3afa06c \ + --hash=sha256:9aae4406ea63d825636cc11ffb34ad3379335803216ee3a856787bcf5ccc751e \ + --hash=sha256:a7ca6d488aa8ff7f329d4c545b2dbad8ac31464f1d8b1c87ad1346717731e4db \ + --hash=sha256:b6cc7ba72a8850621bfec987cb72623e703b7fe2b9127a161ce61e61558ad905 \ + --hash=sha256:bf01b5720be110540be4286e791db73f84a2b721072a3711efff6c324cdf074b \ + --hash=sha256:c02ce36ec760252242a33967d51c289fd0e1c0e6e5cc9397e2279177716add86 \ + --hash=sha256:d9e4432ff660d67d775c66ac42a67cf2453c27cb4d738fc22cb53b5d84c135d4 \ + --hash=sha256:daa564862dd0d39c00f8086f88700fdbe8bc717e993a21e90711acfed02f2402 \ + --hash=sha256:de78575669dddf6099a8a0f46a27e82a1783c557ccc38ee620ed8cc96d3be7d7 \ + --hash=sha256:e64857f395505ebf3d2569935506ae0dfc4a15cb80dc25261176c784662cdcc4 \ + --hash=sha256:f4bd856d702e5b0d96a00ec6b307b0f51c1982c2bf9c0052cf9019e9a544ba99 \ + --hash=sha256:f4c42102bc82a51108e449cbb32b19b180022941c727bac0cfd50170341f16ee # via # gcp-docuploader # gcp-releasetool @@ -377,9 +409,9 @@ pygments==2.13.0 \ # via # readme-renderer # rich -pyjwt==2.4.0 \ - --hash=sha256:72d1d253f32dbd4f5c88eaf1fdc62f3a19f676ccbadb9dbc5d07e951b2b26daf \ - --hash=sha256:d42908208c699b3b973cbeb01a969ba6a96c821eefb1c5bfe4c390c01d67abba +pyjwt==2.6.0 \ + --hash=sha256:69285c7e31fc44f68a1feb309e948e0df53259d579295e6cfe2b1792329f05fd \ + 
--hash=sha256:d83c3d892a77bbb74d3e1a2cfa90afaadb60945205d1095d9221f04466f64c14 # via gcp-releasetool pyparsing==3.0.9 \ --hash=sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb \ @@ -392,9 +424,9 @@ python-dateutil==2.8.2 \ --hash=sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86 \ --hash=sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9 # via gcp-releasetool -readme-renderer==37.0 \ - --hash=sha256:07b7ea234e03e58f77cc222e206e6abb8f4c0435becce5104794ee591f9301c5 \ - --hash=sha256:9fa416704703e509eeb900696751c908ddeb2011319d93700d8f18baff887a69 +readme-renderer==37.3 \ + --hash=sha256:cd653186dfc73055656f090f227f5cb22a046d7f71a841dfa305f55c9a513273 \ + --hash=sha256:f67a16caedfa71eef48a31b39708637a6f4664c4394801a7b0d6432d13907343 # via twine requests==2.28.1 \ --hash=sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983 \ @@ -405,17 +437,17 @@ requests==2.28.1 \ # google-cloud-storage # requests-toolbelt # twine -requests-toolbelt==0.9.1 \ - --hash=sha256:380606e1d10dc85c3bd47bf5a6095f815ec007be7a8b69c878507068df059e6f \ - --hash=sha256:968089d4584ad4ad7c171454f0a5c6dac23971e9472521ea3b6d49d610aa6fc0 +requests-toolbelt==0.10.1 \ + --hash=sha256:18565aa58116d9951ac39baa288d3adb5b3ff975c4f25eee78555d89e8f247f7 \ + --hash=sha256:62e09f7ff5ccbda92772a29f394a49c3ad6cb181d568b1337626b2abb628a63d # via twine rfc3986==2.0.0 \ --hash=sha256:50b1502b60e289cb37883f3dfd34532b8873c7de9f49bb546641ce9cbd256ebd \ --hash=sha256:97aacf9dbd4bfd829baad6e6309fa6573aaf1be3f6fa735c8ab05e46cecb261c # via twine -rich==12.5.1 \ - --hash=sha256:2eb4e6894cde1e017976d2975ac210ef515d7548bc595ba20e195fb9628acdeb \ - --hash=sha256:63a5c5ce3673d3d5fbbf23cd87e11ab84b6b451436f1b7f19ec54b6bc36ed7ca +rich==12.6.0 \ + --hash=sha256:a4eb26484f2c82589bd9a17c73d32a010b1e29d89f1604cd9bf3a2097b81bb5e \ + --hash=sha256:ba3a3775974105c221d31141f2c116f4fd65c5ceb0698657a11e9f295ec93fd0 # via twine rsa==4.9 \ 
--hash=sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7 \ @@ -437,9 +469,9 @@ twine==4.0.1 \ --hash=sha256:42026c18e394eac3e06693ee52010baa5313e4811d5a11050e7d48436cf41b9e \ --hash=sha256:96b1cf12f7ae611a4a40b6ae8e9570215daff0611828f5fe1f37a16255ab24a0 # via -r requirements.in -typing-extensions==4.3.0 \ - --hash=sha256:25642c956049920a5aa49edcdd6ab1e06d7e5d467fc00e0506c44ac86fbfca02 \ - --hash=sha256:e6d2677a32f47fc7eb2795db1dd15c1f34eff616bcaf2cfb5e997f854fa1c4a6 +typing-extensions==4.4.0 \ + --hash=sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa \ + --hash=sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e # via -r requirements.in urllib3==1.26.12 \ --hash=sha256:3fa96cf423e6987997fc326ae8df396db2a8b7c667747d47ddd8ecba91f4a74e \ @@ -447,25 +479,25 @@ urllib3==1.26.12 \ # via # requests # twine -virtualenv==20.16.4 \ - --hash=sha256:014f766e4134d0008dcaa1f95bafa0fb0f575795d07cae50b1bee514185d6782 \ - --hash=sha256:035ed57acce4ac35c82c9d8802202b0e71adac011a511ff650cbcf9635006a22 +virtualenv==20.16.7 \ + --hash=sha256:8691e3ff9387f743e00f6bb20f70121f5e4f596cae754531f2b3b3a1b1ac696e \ + --hash=sha256:efd66b00386fdb7dbe4822d172303f40cd05e50e01740b19ea42425cbe653e29 # via nox webencodings==0.5.1 \ --hash=sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78 \ --hash=sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923 # via bleach -wheel==0.37.1 \ - --hash=sha256:4bdcd7d840138086126cd09254dc6195fb4fc6f01c050a1d7236f2630db1d22a \ - --hash=sha256:e9a504e793efbca1b8e0e9cb979a249cf4a0a7b5b8c9e8b65a5e39d49529c1c4 +wheel==0.38.4 \ + --hash=sha256:965f5259b566725405b05e7cf774052044b1ed30119b5d586b2703aafe8719ac \ + --hash=sha256:b60533f3f5d530e971d6737ca6d58681ee434818fab630c83a734bb10c083ce8 # via -r requirements.in -zipp==3.8.1 \ - --hash=sha256:05b45f1ee8f807d0cc928485ca40a07cb491cf092ff587c0df9cb1fd154848d2 \ - 
--hash=sha256:47c40d7fe183a6f21403a199b3e4192cca5774656965b0a4988ad2f8feb5f009 +zipp==3.10.0 \ + --hash=sha256:4fcb6f278987a6605757302a6e40e896257570d11c51628968ccb2a47e80c6c1 \ + --hash=sha256:7a7262fd930bd3e36c50b9a64897aec3fafff3dfdeec9623ae22b40e93f99bb8 # via importlib-metadata # The following packages are considered to be unsafe in a requirements file: -setuptools==65.2.0 \ - --hash=sha256:7f4bc85450898a09f76ebf28b72fa25bc7111f6c7d665d514a60bba9c75ef2a9 \ - --hash=sha256:a3ca5857c89f82f5c9410e8508cb32f4872a3bafd4aa7ae122a24ca33bccc750 +setuptools==65.5.1 \ + --hash=sha256:d0b9a8433464d5800cbe05094acf5c6d52a91bfac9b52bcfc4d41382be5d5d31 \ + --hash=sha256:e197a19aa8ec9722928f2206f8de752def0e4c9fc6953527360d1c36d94ddb2f # via -r requirements.in diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 46d2371..5405cc8 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -25,7 +25,7 @@ repos: rev: 22.3.0 hooks: - id: black -- repo: https://gitlab.com/pycqa/flake8 +- repo: https://github.com/pycqa/flake8 rev: 3.9.2 hooks: - id: flake8 diff --git a/.release-please-manifest.json b/.release-please-manifest.json new file mode 100644 index 0000000..dd8fde7 --- /dev/null +++ b/.release-please-manifest.json @@ -0,0 +1,3 @@ +{ + ".": "1.5.0" +} diff --git a/CHANGELOG.md b/CHANGELOG.md index 68a6eb4..eb98345 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,28 @@ # Changelog +## [1.5.0](https://github.com/googleapis/python-memcache/compare/v1.4.4...v1.5.0) (2022-12-14) + + +### Features + +* Add support for `google.cloud.memcache.__version__` ([c9c771a](https://github.com/googleapis/python-memcache/commit/c9c771af7c188c8c3ce66113b41a475d290aa6c2)) +* Add typing to proto.Message based class attributes ([c9c771a](https://github.com/googleapis/python-memcache/commit/c9c771af7c188c8c3ce66113b41a475d290aa6c2)) +* Maintenance schedules ([c9c771a](https://github.com/googleapis/python-memcache/commit/c9c771af7c188c8c3ce66113b41a475d290aa6c2)) + + 
+### Bug Fixes + +* Add dict typing for client_options ([c9c771a](https://github.com/googleapis/python-memcache/commit/c9c771af7c188c8c3ce66113b41a475d290aa6c2)) +* **deps:** Require google-api-core >=1.34.0, >=2.11.0 ([b1f7a36](https://github.com/googleapis/python-memcache/commit/b1f7a36fa9649dcd345220f692c29f676d858cdc)) +* Drop usage of pkg_resources ([b1f7a36](https://github.com/googleapis/python-memcache/commit/b1f7a36fa9649dcd345220f692c29f676d858cdc)) +* Fix timeout default values ([b1f7a36](https://github.com/googleapis/python-memcache/commit/b1f7a36fa9649dcd345220f692c29f676d858cdc)) + + +### Documentation + +* **samples:** Snippetgen handling of repeated enum field ([c9c771a](https://github.com/googleapis/python-memcache/commit/c9c771af7c188c8c3ce66113b41a475d290aa6c2)) +* **samples:** Snippetgen should call await on the operation coroutine before calling result ([b1f7a36](https://github.com/googleapis/python-memcache/commit/b1f7a36fa9649dcd345220f692c29f676d858cdc)) + ## [1.4.4](https://github.com/googleapis/python-memcache/compare/v1.4.3...v1.4.4) (2022-10-07) diff --git a/docs/memcache_v1/types.rst b/docs/memcache_v1/types.rst index 06bda21..5892ae0 100644 --- a/docs/memcache_v1/types.rst +++ b/docs/memcache_v1/types.rst @@ -3,5 +3,4 @@ Types for Google Cloud Memcache v1 API .. automodule:: google.cloud.memcache_v1.types :members: - :undoc-members: :show-inheritance: diff --git a/docs/memcache_v1beta2/types.rst b/docs/memcache_v1beta2/types.rst index 19e52f6..1b47aa6 100644 --- a/docs/memcache_v1beta2/types.rst +++ b/docs/memcache_v1beta2/types.rst @@ -3,5 +3,4 @@ Types for Google Cloud Memcache v1beta2 API .. 
automodule:: google.cloud.memcache_v1beta2.types :members: - :undoc-members: :show-inheritance: diff --git a/google/cloud/memcache/__init__.py b/google/cloud/memcache/__init__.py index 0728147..8865b97 100644 --- a/google/cloud/memcache/__init__.py +++ b/google/cloud/memcache/__init__.py @@ -13,6 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from google.cloud.memcache import gapic_version as package_version + +__version__ = package_version.__version__ + from google.cloud.memcache_v1.services.cloud_memcache.async_client import ( CloudMemcacheAsyncClient, @@ -26,11 +30,17 @@ Instance, ListInstancesRequest, ListInstancesResponse, + LocationMetadata, + MaintenancePolicy, + MaintenanceSchedule, MemcacheParameters, MemcacheVersion, OperationMetadata, + RescheduleMaintenanceRequest, UpdateInstanceRequest, UpdateParametersRequest, + WeeklyMaintenanceWindow, + ZoneMetadata, ) __all__ = ( @@ -43,9 +53,15 @@ "Instance", "ListInstancesRequest", "ListInstancesResponse", + "LocationMetadata", + "MaintenancePolicy", + "MaintenanceSchedule", "MemcacheParameters", "OperationMetadata", + "RescheduleMaintenanceRequest", "UpdateInstanceRequest", "UpdateParametersRequest", + "WeeklyMaintenanceWindow", + "ZoneMetadata", "MemcacheVersion", ) diff --git a/google/cloud/memcache/gapic_version.py b/google/cloud/memcache/gapic_version.py new file mode 100644 index 0000000..997edc1 --- /dev/null +++ b/google/cloud/memcache/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "1.5.0" # {x-release-please-version} diff --git a/google/cloud/memcache_v1/__init__.py b/google/cloud/memcache_v1/__init__.py index a4dbffd..02ebf81 100644 --- a/google/cloud/memcache_v1/__init__.py +++ b/google/cloud/memcache_v1/__init__.py @@ -13,6 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from google.cloud.memcache import gapic_version as package_version + +__version__ = package_version.__version__ + from .services.cloud_memcache import CloudMemcacheAsyncClient, CloudMemcacheClient from .types.cloud_memcache import ( @@ -23,11 +27,17 @@ Instance, ListInstancesRequest, ListInstancesResponse, + LocationMetadata, + MaintenancePolicy, + MaintenanceSchedule, MemcacheParameters, MemcacheVersion, OperationMetadata, + RescheduleMaintenanceRequest, UpdateInstanceRequest, UpdateParametersRequest, + WeeklyMaintenanceWindow, + ZoneMetadata, ) __all__ = ( @@ -40,9 +50,15 @@ "Instance", "ListInstancesRequest", "ListInstancesResponse", + "LocationMetadata", + "MaintenancePolicy", + "MaintenanceSchedule", "MemcacheParameters", "MemcacheVersion", "OperationMetadata", + "RescheduleMaintenanceRequest", "UpdateInstanceRequest", "UpdateParametersRequest", + "WeeklyMaintenanceWindow", + "ZoneMetadata", ) diff --git a/google/cloud/memcache_v1/gapic_metadata.json b/google/cloud/memcache_v1/gapic_metadata.json index 08d37fa..8c92ac7 100644 --- a/google/cloud/memcache_v1/gapic_metadata.json +++ b/google/cloud/memcache_v1/gapic_metadata.json @@ -35,6 +35,11 @@ "list_instances" ] }, + 
"RescheduleMaintenance": { + "methods": [ + "reschedule_maintenance" + ] + }, "UpdateInstance": { "methods": [ "update_instance" @@ -75,6 +80,11 @@ "list_instances" ] }, + "RescheduleMaintenance": { + "methods": [ + "reschedule_maintenance" + ] + }, "UpdateInstance": { "methods": [ "update_instance" diff --git a/google/cloud/memcache_v1/gapic_version.py b/google/cloud/memcache_v1/gapic_version.py new file mode 100644 index 0000000..997edc1 --- /dev/null +++ b/google/cloud/memcache_v1/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +__version__ = "1.5.0" # {x-release-please-version} diff --git a/google/cloud/memcache_v1/services/cloud_memcache/async_client.py b/google/cloud/memcache_v1/services/cloud_memcache/async_client.py index b848c77..819dc9a 100644 --- a/google/cloud/memcache_v1/services/cloud_memcache/async_client.py +++ b/google/cloud/memcache_v1/services/cloud_memcache/async_client.py @@ -16,7 +16,17 @@ from collections import OrderedDict import functools import re -from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 @@ -24,7 +34,8 @@ from google.api_core.client_options import ClientOptions from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore -import pkg_resources + +from google.cloud.memcache_v1 import gapic_version as package_version try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] @@ -33,6 +44,8 @@ from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore @@ -184,9 +197,9 @@ def transport(self) -> CloudMemcacheTransport: def __init__( self, *, - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, transport: Union[str, CloudMemcacheTransport] = "grpc_asyncio", - client_options: ClientOptions = None, + client_options: Optional[ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiates the cloud 
memcache client. @@ -230,11 +243,11 @@ def __init__( async def list_instances( self, - request: Union[cloud_memcache.ListInstancesRequest, dict] = None, + request: Optional[Union[cloud_memcache.ListInstancesRequest, dict]] = None, *, - parent: str = None, + parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListInstancesAsyncPager: r"""Lists Instances in a given location. @@ -267,7 +280,7 @@ async def sample_list_instances(): print(response) Args: - request (Union[google.cloud.memcache_v1.types.ListInstancesRequest, dict]): + request (Optional[Union[google.cloud.memcache_v1.types.ListInstancesRequest, dict]]): The request object. Request for [ListInstances][google.cloud.memcache.v1.CloudMemcache.ListInstances]. parent (:class:`str`): @@ -347,11 +360,11 @@ async def sample_list_instances(): async def get_instance( self, - request: Union[cloud_memcache.GetInstanceRequest, dict] = None, + request: Optional[Union[cloud_memcache.GetInstanceRequest, dict]] = None, *, - name: str = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> cloud_memcache.Instance: r"""Gets details of a single Instance. @@ -383,7 +396,7 @@ async def sample_get_instance(): print(response) Args: - request (Union[google.cloud.memcache_v1.types.GetInstanceRequest, dict]): + request (Optional[Union[google.cloud.memcache_v1.types.GetInstanceRequest, dict]]): The request object. Request for [GetInstance][google.cloud.memcache.v1.CloudMemcache.GetInstance]. name (:class:`str`): @@ -403,7 +416,7 @@ async def sample_get_instance(): Returns: google.cloud.memcache_v1.types.Instance: - + A Memorystore for Memcached instance """ # Create or coerce a protobuf request object. 
# Quick check: If we got a request object, we should *not* have @@ -449,13 +462,13 @@ async def sample_get_instance(): async def create_instance( self, - request: Union[cloud_memcache.CreateInstanceRequest, dict] = None, + request: Optional[Union[cloud_memcache.CreateInstanceRequest, dict]] = None, *, - parent: str = None, - instance: cloud_memcache.Instance = None, - instance_id: str = None, + parent: Optional[str] = None, + instance: Optional[cloud_memcache.Instance] = None, + instance_id: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: r"""Creates a new Instance in a given location. @@ -493,13 +506,13 @@ async def sample_create_instance(): print("Waiting for operation to complete...") - response = await operation.result() + response = (await operation).result() # Handle the response print(response) Args: - request (Union[google.cloud.memcache_v1.types.CreateInstanceRequest, dict]): + request (Optional[Union[google.cloud.memcache_v1.types.CreateInstanceRequest, dict]]): The request object. Request for [CreateInstance][google.cloud.memcache.v1.CloudMemcache.CreateInstance]. parent (:class:`str`): @@ -525,10 +538,10 @@ async def sample_create_instance(): - Must start with a letter. - Must be between 1-40 characters. - Must end with a number or a letter. - - Must be unique within the user project / location + - Must be unique within the user project / location. - If any of the above are not met, will raise an invalid - argument error. + If any of the above are not met, the API raises an + invalid argument error. This corresponds to the ``instance_id`` field on the ``request`` instance; if ``request`` is provided, this @@ -544,7 +557,8 @@ async def sample_create_instance(): An object representing a long-running operation. 
The result type for the operation will be - :class:`google.cloud.memcache_v1.types.Instance` + :class:`google.cloud.memcache_v1.types.Instance` A + Memorystore for Memcached instance """ # Create or coerce a protobuf request object. @@ -603,12 +617,12 @@ async def sample_create_instance(): async def update_instance( self, - request: Union[cloud_memcache.UpdateInstanceRequest, dict] = None, + request: Optional[Union[cloud_memcache.UpdateInstanceRequest, dict]] = None, *, - instance: cloud_memcache.Instance = None, - update_mask: field_mask_pb2.FieldMask = None, + instance: Optional[cloud_memcache.Instance] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: r"""Updates an existing Instance in a given project and @@ -645,13 +659,13 @@ async def sample_update_instance(): print("Waiting for operation to complete...") - response = await operation.result() + response = (await operation).result() # Handle the response print(response) Args: - request (Union[google.cloud.memcache_v1.types.UpdateInstanceRequest, dict]): + request (Optional[Union[google.cloud.memcache_v1.types.UpdateInstanceRequest, dict]]): The request object. Request for [UpdateInstance][google.cloud.memcache.v1.CloudMemcache.UpdateInstance]. instance (:class:`google.cloud.memcache_v1.types.Instance`): @@ -680,7 +694,8 @@ async def sample_update_instance(): An object representing a long-running operation. The result type for the operation will be - :class:`google.cloud.memcache_v1.types.Instance` + :class:`google.cloud.memcache_v1.types.Instance` A + Memorystore for Memcached instance """ # Create or coerce a protobuf request object. 
@@ -739,19 +754,19 @@ async def sample_update_instance(): async def update_parameters( self, - request: Union[cloud_memcache.UpdateParametersRequest, dict] = None, + request: Optional[Union[cloud_memcache.UpdateParametersRequest, dict]] = None, *, - name: str = None, - update_mask: field_mask_pb2.FieldMask = None, - parameters: cloud_memcache.MemcacheParameters = None, + name: Optional[str] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + parameters: Optional[cloud_memcache.MemcacheParameters] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: - r"""Updates the defined Memcached Parameters for an - existing Instance. This method only stages the - parameters, it must be followed by ApplyParameters to - apply the parameters to nodes of the Memcached Instance. + r"""Updates the defined Memcached parameters for an existing + instance. This method only stages the parameters, it must be + followed by ``ApplyParameters`` to apply the parameters to nodes + of the Memcached instance. .. code-block:: python @@ -778,13 +793,13 @@ async def sample_update_parameters(): print("Waiting for operation to complete...") - response = await operation.result() + response = (await operation).result() # Handle the response print(response) Args: - request (Union[google.cloud.memcache_v1.types.UpdateParametersRequest, dict]): + request (Optional[Union[google.cloud.memcache_v1.types.UpdateParametersRequest, dict]]): The request object. Request for [UpdateParameters][google.cloud.memcache.v1.CloudMemcache.UpdateParameters]. name (:class:`str`): @@ -818,7 +833,8 @@ async def sample_update_parameters(): An object representing a long-running operation. 
The result type for the operation will be - :class:`google.cloud.memcache_v1.types.Instance` + :class:`google.cloud.memcache_v1.types.Instance` A + Memorystore for Memcached instance """ # Create or coerce a protobuf request object. @@ -877,11 +893,11 @@ async def sample_update_parameters(): async def delete_instance( self, - request: Union[cloud_memcache.DeleteInstanceRequest, dict] = None, + request: Optional[Union[cloud_memcache.DeleteInstanceRequest, dict]] = None, *, - name: str = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: r"""Deletes a single Instance. @@ -911,13 +927,13 @@ async def sample_delete_instance(): print("Waiting for operation to complete...") - response = await operation.result() + response = (await operation).result() # Handle the response print(response) Args: - request (Union[google.cloud.memcache_v1.types.DeleteInstanceRequest, dict]): + request (Optional[Union[google.cloud.memcache_v1.types.DeleteInstanceRequest, dict]]): The request object. Request for [DeleteInstance][google.cloud.memcache.v1.CloudMemcache.DeleteInstance]. 
name (:class:`str`): @@ -1003,18 +1019,18 @@ async def sample_delete_instance(): async def apply_parameters( self, - request: Union[cloud_memcache.ApplyParametersRequest, dict] = None, + request: Optional[Union[cloud_memcache.ApplyParametersRequest, dict]] = None, *, - name: str = None, - node_ids: Sequence[str] = None, - apply_all: bool = None, + name: Optional[str] = None, + node_ids: Optional[MutableSequence[str]] = None, + apply_all: Optional[bool] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: - r"""ApplyParameters will restart the set of specified - nodes in order to update them to the current set of - parameters for the Memcached Instance. + r"""``ApplyParameters`` restarts the set of specified nodes in order + to update them to the current set of parameters for the + Memcached Instance. .. code-block:: python @@ -1041,13 +1057,13 @@ async def sample_apply_parameters(): print("Waiting for operation to complete...") - response = await operation.result() + response = (await operation).result() # Handle the response print(response) Args: - request (Union[google.cloud.memcache_v1.types.ApplyParametersRequest, dict]): + request (Optional[Union[google.cloud.memcache_v1.types.ApplyParametersRequest, dict]]): The request object. Request for [ApplyParameters][google.cloud.memcache.v1.CloudMemcache.ApplyParameters]. name (:class:`str`): @@ -1058,20 +1074,18 @@ async def sample_apply_parameters(): This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - node_ids (:class:`Sequence[str]`): - Nodes to which we should apply the - instance-level parameter group. + node_ids (:class:`MutableSequence[str]`): + Nodes to which the instance-level + parameter group is applied. 
This corresponds to the ``node_ids`` field on the ``request`` instance; if ``request`` is provided, this should not be set. apply_all (:class:`bool`): - Whether to apply instance-level - parameter group to all nodes. If set to - true, will explicitly restrict users - from specifying any nodes, and apply - parameter group updates to all nodes - within the instance. + Whether to apply instance-level parameter group to all + nodes. If set to true, users are restricted from + specifying individual nodes, and ``ApplyParameters`` + updates all nodes within the instance. This corresponds to the ``apply_all`` field on the ``request`` instance; if ``request`` is provided, this @@ -1087,7 +1101,8 @@ async def sample_apply_parameters(): An object representing a long-running operation. The result type for the operation will be - :class:`google.cloud.memcache_v1.types.Instance` + :class:`google.cloud.memcache_v1.types.Instance` A + Memorystore for Memcached instance """ # Create or coerce a protobuf request object. @@ -1144,6 +1159,476 @@ async def sample_apply_parameters(): # Done; return the response. return response + async def reschedule_maintenance( + self, + request: Optional[ + Union[cloud_memcache.RescheduleMaintenanceRequest, dict] + ] = None, + *, + instance: Optional[str] = None, + reschedule_type: Optional[ + cloud_memcache.RescheduleMaintenanceRequest.RescheduleType + ] = None, + schedule_time: Optional[timestamp_pb2.Timestamp] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Reschedules upcoming maintenance event. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import memcache_v1 + + async def sample_reschedule_maintenance(): + # Create a client + client = memcache_v1.CloudMemcacheAsyncClient() + + # Initialize request argument(s) + request = memcache_v1.RescheduleMaintenanceRequest( + instance="instance_value", + reschedule_type="SPECIFIC_TIME", + ) + + # Make the request + operation = client.reschedule_maintenance(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.memcache_v1.types.RescheduleMaintenanceRequest, dict]]): + The request object. Request for + [RescheduleMaintenance][google.cloud.memcache.v1.CloudMemcache.RescheduleMaintenance]. + instance (:class:`str`): + Required. Memcache instance resource name using the + form: + ``projects/{project_id}/locations/{location_id}/instances/{instance_id}`` + where ``location_id`` refers to a GCP region. + + This corresponds to the ``instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + reschedule_type (:class:`google.cloud.memcache_v1.types.RescheduleMaintenanceRequest.RescheduleType`): + Required. If reschedule type is SPECIFIC_TIME, must set + up schedule_time as well. + + This corresponds to the ``reschedule_type`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + schedule_time (:class:`google.protobuf.timestamp_pb2.Timestamp`): + Timestamp when the maintenance shall be rescheduled to + if reschedule_type=SPECIFIC_TIME, in RFC 3339 format, + for example ``2012-11-15T16:19:00.094Z``. + + This corresponds to the ``schedule_time`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.memcache_v1.types.Instance` A + Memorystore for Memcached instance + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([instance, reschedule_type, schedule_time]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloud_memcache.RescheduleMaintenanceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if instance is not None: + request.instance = instance + if reschedule_type is not None: + request.reschedule_type = reschedule_type + if schedule_time is not None: + request.schedule_time = schedule_time + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.reschedule_maintenance, + default_timeout=1200.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("instance", request.instance),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. 
+ response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + cloud_memcache.Instance, + metadata_type=cloud_memcache.OperationMetadata, + ) + + # Done; return the response. + return response + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running operation. 
+ + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.delete_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. 
+ + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. 
+ """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.get_location, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
+ if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.list_locations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + async def __aenter__(self): return self @@ -1151,14 +1636,9 @@ async def __aexit__(self, exc_type, exc, tb): await self.transport.close() -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-memcache", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) __all__ = ("CloudMemcacheAsyncClient",) diff --git a/google/cloud/memcache_v1/services/cloud_memcache/client.py b/google/cloud/memcache_v1/services/cloud_memcache/client.py index b182f15..d00e4f2 100644 --- a/google/cloud/memcache_v1/services/cloud_memcache/client.py +++ b/google/cloud/memcache_v1/services/cloud_memcache/client.py @@ -16,7 +16,18 @@ from collections import OrderedDict import os import re -from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) from google.api_core import client_options as client_options_lib from google.api_core import exceptions as core_exceptions @@ -27,7 +38,8 @@ from google.auth.transport import mtls 
# type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.oauth2 import service_account # type: ignore -import pkg_resources + +from google.cloud.memcache_v1 import gapic_version as package_version try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] @@ -36,6 +48,8 @@ from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore @@ -62,7 +76,7 @@ class CloudMemcacheClientMeta(type): def get_transport_class( cls, - label: str = None, + label: Optional[str] = None, ) -> Type[CloudMemcacheTransport]: """Returns an appropriate transport class. @@ -356,8 +370,8 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, CloudMemcacheTransport, None] = None, - client_options: Optional[client_options_lib.ClientOptions] = None, + transport: Optional[Union[str, CloudMemcacheTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiates the cloud memcache client. @@ -371,7 +385,7 @@ def __init__( transport (Union[str, CloudMemcacheTransport]): The transport to use. If set to None, a transport is chosen automatically. - client_options (google.api_core.client_options.ClientOptions): Custom options for the + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the client. It won't take effect if a ``transport`` instance is provided. (1) The ``api_endpoint`` property can be used to override the default endpoint provided by the client. 
GOOGLE_API_USE_MTLS_ENDPOINT @@ -401,6 +415,7 @@ def __init__( client_options = client_options_lib.from_dict(client_options) if client_options is None: client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( client_options @@ -453,11 +468,11 @@ def __init__( def list_instances( self, - request: Union[cloud_memcache.ListInstancesRequest, dict] = None, + request: Optional[Union[cloud_memcache.ListInstancesRequest, dict]] = None, *, - parent: str = None, + parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListInstancesPager: r"""Lists Instances in a given location. @@ -570,11 +585,11 @@ def sample_list_instances(): def get_instance( self, - request: Union[cloud_memcache.GetInstanceRequest, dict] = None, + request: Optional[Union[cloud_memcache.GetInstanceRequest, dict]] = None, *, - name: str = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> cloud_memcache.Instance: r"""Gets details of a single Instance. @@ -626,7 +641,7 @@ def sample_get_instance(): Returns: google.cloud.memcache_v1.types.Instance: - + A Memorystore for Memcached instance """ # Create or coerce a protobuf request object. 
# Quick check: If we got a request object, we should *not* have @@ -672,13 +687,13 @@ def sample_get_instance(): def create_instance( self, - request: Union[cloud_memcache.CreateInstanceRequest, dict] = None, + request: Optional[Union[cloud_memcache.CreateInstanceRequest, dict]] = None, *, - parent: str = None, - instance: cloud_memcache.Instance = None, - instance_id: str = None, + parent: Optional[str] = None, + instance: Optional[cloud_memcache.Instance] = None, + instance_id: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: r"""Creates a new Instance in a given location. @@ -748,10 +763,10 @@ def sample_create_instance(): - Must start with a letter. - Must be between 1-40 characters. - Must end with a number or a letter. - - Must be unique within the user project / location + - Must be unique within the user project / location. - If any of the above are not met, will raise an invalid - argument error. + If any of the above are not met, the API raises an + invalid argument error. This corresponds to the ``instance_id`` field on the ``request`` instance; if ``request`` is provided, this @@ -767,7 +782,8 @@ def sample_create_instance(): An object representing a long-running operation. The result type for the operation will be - :class:`google.cloud.memcache_v1.types.Instance` + :class:`google.cloud.memcache_v1.types.Instance` A + Memorystore for Memcached instance """ # Create or coerce a protobuf request object. 
@@ -826,12 +842,12 @@ def sample_create_instance(): def update_instance( self, - request: Union[cloud_memcache.UpdateInstanceRequest, dict] = None, + request: Optional[Union[cloud_memcache.UpdateInstanceRequest, dict]] = None, *, - instance: cloud_memcache.Instance = None, - update_mask: field_mask_pb2.FieldMask = None, + instance: Optional[cloud_memcache.Instance] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: r"""Updates an existing Instance in a given project and @@ -903,7 +919,8 @@ def sample_update_instance(): An object representing a long-running operation. The result type for the operation will be - :class:`google.cloud.memcache_v1.types.Instance` + :class:`google.cloud.memcache_v1.types.Instance` A + Memorystore for Memcached instance """ # Create or coerce a protobuf request object. @@ -962,19 +979,19 @@ def sample_update_instance(): def update_parameters( self, - request: Union[cloud_memcache.UpdateParametersRequest, dict] = None, + request: Optional[Union[cloud_memcache.UpdateParametersRequest, dict]] = None, *, - name: str = None, - update_mask: field_mask_pb2.FieldMask = None, - parameters: cloud_memcache.MemcacheParameters = None, + name: Optional[str] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + parameters: Optional[cloud_memcache.MemcacheParameters] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: - r"""Updates the defined Memcached Parameters for an - existing Instance. This method only stages the - parameters, it must be followed by ApplyParameters to - apply the parameters to nodes of the Memcached Instance. 
+ r"""Updates the defined Memcached parameters for an existing + instance. This method only stages the parameters, it must be + followed by ``ApplyParameters`` to apply the parameters to nodes + of the Memcached instance. .. code-block:: python @@ -1041,7 +1058,8 @@ def sample_update_parameters(): An object representing a long-running operation. The result type for the operation will be - :class:`google.cloud.memcache_v1.types.Instance` + :class:`google.cloud.memcache_v1.types.Instance` A + Memorystore for Memcached instance """ # Create or coerce a protobuf request object. @@ -1100,11 +1118,11 @@ def sample_update_parameters(): def delete_instance( self, - request: Union[cloud_memcache.DeleteInstanceRequest, dict] = None, + request: Optional[Union[cloud_memcache.DeleteInstanceRequest, dict]] = None, *, - name: str = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: r"""Deletes a single Instance. @@ -1226,18 +1244,18 @@ def sample_delete_instance(): def apply_parameters( self, - request: Union[cloud_memcache.ApplyParametersRequest, dict] = None, + request: Optional[Union[cloud_memcache.ApplyParametersRequest, dict]] = None, *, - name: str = None, - node_ids: Sequence[str] = None, - apply_all: bool = None, + name: Optional[str] = None, + node_ids: Optional[MutableSequence[str]] = None, + apply_all: Optional[bool] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: - r"""ApplyParameters will restart the set of specified - nodes in order to update them to the current set of - parameters for the Memcached Instance. 
+ r"""``ApplyParameters`` restarts the set of specified nodes in order + to update them to the current set of parameters for the + Memcached Instance. .. code-block:: python @@ -1281,20 +1299,18 @@ def sample_apply_parameters(): This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - node_ids (Sequence[str]): - Nodes to which we should apply the - instance-level parameter group. + node_ids (MutableSequence[str]): + Nodes to which the instance-level + parameter group is applied. This corresponds to the ``node_ids`` field on the ``request`` instance; if ``request`` is provided, this should not be set. apply_all (bool): - Whether to apply instance-level - parameter group to all nodes. If set to - true, will explicitly restrict users - from specifying any nodes, and apply - parameter group updates to all nodes - within the instance. + Whether to apply instance-level parameter group to all + nodes. If set to true, users are restricted from + specifying individual nodes, and ``ApplyParameters`` + updates all nodes within the instance. This corresponds to the ``apply_all`` field on the ``request`` instance; if ``request`` is provided, this @@ -1310,7 +1326,8 @@ def sample_apply_parameters(): An object representing a long-running operation. The result type for the operation will be - :class:`google.cloud.memcache_v1.types.Instance` + :class:`google.cloud.memcache_v1.types.Instance` A + Memorystore for Memcached instance """ # Create or coerce a protobuf request object. @@ -1367,6 +1384,151 @@ def sample_apply_parameters(): # Done; return the response. 
return response + def reschedule_maintenance( + self, + request: Optional[ + Union[cloud_memcache.RescheduleMaintenanceRequest, dict] + ] = None, + *, + instance: Optional[str] = None, + reschedule_type: Optional[ + cloud_memcache.RescheduleMaintenanceRequest.RescheduleType + ] = None, + schedule_time: Optional[timestamp_pb2.Timestamp] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Reschedules upcoming maintenance event. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import memcache_v1 + + def sample_reschedule_maintenance(): + # Create a client + client = memcache_v1.CloudMemcacheClient() + + # Initialize request argument(s) + request = memcache_v1.RescheduleMaintenanceRequest( + instance="instance_value", + reschedule_type="SPECIFIC_TIME", + ) + + # Make the request + operation = client.reschedule_maintenance(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.memcache_v1.types.RescheduleMaintenanceRequest, dict]): + The request object. Request for + [RescheduleMaintenance][google.cloud.memcache.v1.CloudMemcache.RescheduleMaintenance]. + instance (str): + Required. Memcache instance resource name using the + form: + ``projects/{project_id}/locations/{location_id}/instances/{instance_id}`` + where ``location_id`` refers to a GCP region. 
+ + This corresponds to the ``instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + reschedule_type (google.cloud.memcache_v1.types.RescheduleMaintenanceRequest.RescheduleType): + Required. If reschedule type is SPECIFIC_TIME, must set + up schedule_time as well. + + This corresponds to the ``reschedule_type`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + schedule_time (google.protobuf.timestamp_pb2.Timestamp): + Timestamp when the maintenance shall be rescheduled to + if reschedule_type=SPECIFIC_TIME, in RFC 3339 format, + for example ``2012-11-15T16:19:00.094Z``. + + This corresponds to the ``schedule_time`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.memcache_v1.types.Instance` A + Memorystore for Memcached instance + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([instance, reschedule_type, schedule_time]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloud_memcache.RescheduleMaintenanceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, cloud_memcache.RescheduleMaintenanceRequest): + request = cloud_memcache.RescheduleMaintenanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if instance is not None: + request.instance = instance + if reschedule_type is not None: + request.reschedule_type = reschedule_type + if schedule_time is not None: + request.schedule_time = schedule_time + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.reschedule_maintenance] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("instance", request.instance),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + cloud_memcache.Instance, + metadata_type=cloud_memcache.OperationMetadata, + ) + + # Done; return the response. + return response + def __enter__(self): return self @@ -1380,15 +1542,335 @@ def __exit__(self, type, value, traceback): """ self.transport.close() + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. 
-try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-memcache", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. 
+ + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. 
+ retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.delete_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method.wrap_method( + self._transport.get_location, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_locations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) __all__ = ("CloudMemcacheClient",) diff --git a/google/cloud/memcache_v1/services/cloud_memcache/transports/base.py b/google/cloud/memcache_v1/services/cloud_memcache/transports/base.py index e02460b..3f787c2 100644 --- a/google/cloud/memcache_v1/services/cloud_memcache/transports/base.py +++ b/google/cloud/memcache_v1/services/cloud_memcache/transports/base.py @@ -22,20 +22,16 @@ from google.api_core import retry as retries import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account # type: ignore -import pkg_resources +from google.cloud.memcache_v1 import gapic_version as package_version from google.cloud.memcache_v1.types import cloud_memcache -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-memcache", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) class CloudMemcacheTransport(abc.ABC): @@ -49,7 +45,7 @@ def __init__( self, *, host: str = DEFAULT_HOST, - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -163,6 +159,11 @@ def _prep_wrapped_messages(self, 
client_info): default_timeout=1200.0, client_info=client_info, ), + self.reschedule_maintenance: gapic_v1.method.wrap_method( + self.reschedule_maintenance, + default_timeout=1200.0, + client_info=client_info, + ), } def close(self): @@ -245,6 +246,69 @@ def apply_parameters( ]: raise NotImplementedError() + @property + def reschedule_maintenance( + self, + ) -> Callable[ + [cloud_memcache.RescheduleMaintenanceRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]: + raise NotImplementedError() + + @property + def get_location( + self, + ) -> Callable[ + [locations_pb2.GetLocationRequest], + Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], + ]: + raise NotImplementedError() + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], + Union[ + locations_pb2.ListLocationsResponse, + Awaitable[locations_pb2.ListLocationsResponse], + ], + ]: + raise NotImplementedError() + @property def kind(self) -> str: raise NotImplementedError() diff --git a/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc.py b/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc.py index a135b9a..c190673 100644 --- 
a/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc.py +++ b/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc.py @@ -20,6 +20,7 @@ import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore import grpc # type: ignore @@ -65,14 +66,14 @@ def __init__( self, *, host: str = "memcache.googleapis.com", - credentials: ga_credentials.Credentials = None, - credentials_file: str = None, - scopes: Sequence[str] = None, - channel: grpc.Channel = None, - api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - ssl_channel_credentials: grpc.ChannelCredentials = None, - client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, @@ -200,8 +201,8 @@ def __init__( def create_channel( cls, host: str = "memcache.googleapis.com", - credentials: ga_credentials.Credentials = None, - credentials_file: str = None, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, **kwargs, @@ -375,10 +376,10 @@ def update_parameters( ) -> 
Callable[[cloud_memcache.UpdateParametersRequest], operations_pb2.Operation]: r"""Return a callable for the update parameters method over gRPC. - Updates the defined Memcached Parameters for an - existing Instance. This method only stages the - parameters, it must be followed by ApplyParameters to - apply the parameters to nodes of the Memcached Instance. + Updates the defined Memcached parameters for an existing + instance. This method only stages the parameters, it must be + followed by ``ApplyParameters`` to apply the parameters to nodes + of the Memcached instance. Returns: Callable[[~.UpdateParametersRequest], @@ -430,9 +431,9 @@ def apply_parameters( ) -> Callable[[cloud_memcache.ApplyParametersRequest], operations_pb2.Operation]: r"""Return a callable for the apply parameters method over gRPC. - ApplyParameters will restart the set of specified - nodes in order to update them to the current set of - parameters for the Memcached Instance. + ``ApplyParameters`` restarts the set of specified nodes in order + to update them to the current set of parameters for the + Memcached Instance. Returns: Callable[[~.ApplyParametersRequest], @@ -452,9 +453,143 @@ def apply_parameters( ) return self._stubs["apply_parameters"] + @property + def reschedule_maintenance( + self, + ) -> Callable[ + [cloud_memcache.RescheduleMaintenanceRequest], operations_pb2.Operation + ]: + r"""Return a callable for the reschedule maintenance method over gRPC. + + Reschedules upcoming maintenance event. + + Returns: + Callable[[~.RescheduleMaintenanceRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "reschedule_maintenance" not in self._stubs: + self._stubs["reschedule_maintenance"] = self.grpc_channel.unary_unary( + "/google.cloud.memcache.v1.CloudMemcache/RescheduleMaintenance", + request_serializer=cloud_memcache.RescheduleMaintenanceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["reschedule_maintenance"] + def close(self): self.grpc_channel.close() + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse + ]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_location" not in self._stubs: + self._stubs["get_location"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + @property def kind(self) -> str: return "grpc" diff --git a/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc_asyncio.py b/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc_asyncio.py index 3430e39..cfc1ad6 100644 --- a/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc_asyncio.py +++ b/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc_asyncio.py @@ -19,6 +19,7 @@ from google.api_core import gapic_v1, grpc_helpers_async, operations_v1 from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore import grpc # type: ignore from grpc.experimental import aio # type: ignore @@ -67,7 +68,7 @@ class CloudMemcacheGrpcAsyncIOTransport(CloudMemcacheTransport): def create_channel( cls, host: str = "memcache.googleapis.com", - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -110,15 +111,15 @@ def __init__( self, *, host: str = "memcache.googleapis.com", - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, - channel: aio.Channel = None, - api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - ssl_channel_credentials: grpc.ChannelCredentials = 
None, - client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, - quota_project_id=None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, api_audience: Optional[str] = None, @@ -389,10 +390,10 @@ def update_parameters( ]: r"""Return a callable for the update parameters method over gRPC. - Updates the defined Memcached Parameters for an - existing Instance. This method only stages the - parameters, it must be followed by ApplyParameters to - apply the parameters to nodes of the Memcached Instance. + Updates the defined Memcached parameters for an existing + instance. This method only stages the parameters, it must be + followed by ``ApplyParameters`` to apply the parameters to nodes + of the Memcached instance. Returns: Callable[[~.UpdateParametersRequest], @@ -448,9 +449,9 @@ def apply_parameters( ]: r"""Return a callable for the apply parameters method over gRPC. - ApplyParameters will restart the set of specified - nodes in order to update them to the current set of - parameters for the Memcached Instance. + ``ApplyParameters`` restarts the set of specified nodes in order + to update them to the current set of parameters for the + Memcached Instance. Returns: Callable[[~.ApplyParametersRequest], @@ -470,8 +471,143 @@ def apply_parameters( ) return self._stubs["apply_parameters"] + @property + def reschedule_maintenance( + self, + ) -> Callable[ + [cloud_memcache.RescheduleMaintenanceRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the reschedule maintenance method over gRPC. 
+ + Reschedules upcoming maintenance event. + + Returns: + Callable[[~.RescheduleMaintenanceRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "reschedule_maintenance" not in self._stubs: + self._stubs["reschedule_maintenance"] = self.grpc_channel.unary_unary( + "/google.cloud.memcache.v1.CloudMemcache/RescheduleMaintenance", + request_serializer=cloud_memcache.RescheduleMaintenanceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["reschedule_maintenance"] + def close(self): return self.grpc_channel.close() + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse + ]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_location" not in self._stubs: + self._stubs["get_location"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + __all__ = ("CloudMemcacheGrpcAsyncIOTransport",) diff --git a/google/cloud/memcache_v1/types/__init__.py b/google/cloud/memcache_v1/types/__init__.py index adc89ef..3887af7 100644 --- a/google/cloud/memcache_v1/types/__init__.py +++ b/google/cloud/memcache_v1/types/__init__.py @@ -21,11 +21,17 @@ Instance, ListInstancesRequest, ListInstancesResponse, + LocationMetadata, + MaintenancePolicy, + MaintenanceSchedule, MemcacheParameters, MemcacheVersion, OperationMetadata, + RescheduleMaintenanceRequest, UpdateInstanceRequest, UpdateParametersRequest, + WeeklyMaintenanceWindow, + ZoneMetadata, ) __all__ = ( @@ -36,9 +42,15 @@ "Instance", "ListInstancesRequest", "ListInstancesResponse", + "LocationMetadata", + "MaintenancePolicy", + "MaintenanceSchedule", "MemcacheParameters", "OperationMetadata", + "RescheduleMaintenanceRequest", "UpdateInstanceRequest", "UpdateParametersRequest", + "WeeklyMaintenanceWindow", + "ZoneMetadata", "MemcacheVersion", ) diff --git a/google/cloud/memcache_v1/types/cloud_memcache.py b/google/cloud/memcache_v1/types/cloud_memcache.py index 1077426..bc1947e 100644 --- a/google/cloud/memcache_v1/types/cloud_memcache.py +++ b/google/cloud/memcache_v1/types/cloud_memcache.py @@ -13,8 +13,13 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +from typing import MutableMapping, MutableSequence + +from google.protobuf import duration_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore +from google.type import dayofweek_pb2 # type: ignore +from google.type import timeofday_pb2 # type: ignore import proto # type: ignore __protobuf__ = proto.module( @@ -22,6 +27,10 @@ manifest={ "MemcacheVersion", "Instance", + "MaintenancePolicy", + "WeeklyMaintenanceWindow", + "MaintenanceSchedule", + "RescheduleMaintenanceRequest", "ListInstancesRequest", "ListInstancesResponse", "GetInstanceRequest", @@ -32,6 +41,8 @@ "UpdateParametersRequest", "MemcacheParameters", "OperationMetadata", + "LocationMetadata", + "ZoneMetadata", }, ) @@ -43,7 +54,7 @@ class MemcacheVersion(proto.Enum): class Instance(proto.Message): - r""" + r"""A Memorystore for Memcached instance Attributes: name (str): @@ -51,16 +62,17 @@ class Instance(proto.Message): including project and location using the form: ``projects/{project_id}/locations/{location_id}/instances/{instance_id}`` - Note: Memcached instances are managed and addressed at - regional level so location_id here refers to a GCP region; - however, users may choose which zones Memcached nodes within - an instances should be provisioned in. Refer to [zones] + Note: Memcached instances are managed and addressed at the + regional level so ``location_id`` here refers to a Google + Cloud region; however, users may choose which zones + Memcached nodes should be provisioned in within an instance. + Refer to [zones][google.cloud.memcache.v1.Instance.zones] field for more details. display_name (str): - User provided name for the instance only used - for display purposes. Cannot be more than 80 - characters. - labels (Mapping[str, str]): + User provided name for the instance, which is + only used for display purposes. Cannot be more + than 80 characters. 
+ labels (MutableMapping[str, str]): Resource labels to represent user-provided metadata. Refer to cloud documentation on labels for more details. @@ -70,9 +82,9 @@ class Instance(proto.Message): `network `__ to which the instance is connected. If left unspecified, the ``default`` network will be used. - zones (Sequence[str]): - Zones where Memcached nodes should be - provisioned in. Memcached nodes will be equally + zones (MutableSequence[str]): + Zones in which Memcached nodes should be + provisioned. Memcached nodes will be equally distributed across these zones. If not provided, the service will by default create nodes in all zones in the region for the instance. @@ -84,15 +96,16 @@ class Instance(proto.Message): memcache_version (google.cloud.memcache_v1.types.MemcacheVersion): The major version of Memcached software. If not provided, latest supported version will be used. Currently the latest - supported major version is MEMCACHE_1_5. The minor version - will be automatically determined by our system based on the - latest supported minor version. + supported major version is ``MEMCACHE_1_5``. The minor + version will be automatically determined by our system based + on the latest supported minor version. parameters (google.cloud.memcache_v1.types.MemcacheParameters): - Optional: User defined parameters to apply to - the memcached process on each node. - memcache_nodes (Sequence[google.cloud.memcache_v1.types.Instance.Node]): - Output only. List of Memcached nodes. Refer to [Node] - message for more details. + User defined parameters to apply to the + memcached process on each node. + memcache_nodes (MutableSequence[google.cloud.memcache_v1.types.Instance.Node]): + Output only. List of Memcached nodes. Refer to + [Node][google.cloud.memcache.v1.Instance.Node] message for + more details. create_time (google.protobuf.timestamp_pb2.Timestamp): Output only. The time the instance was created. @@ -110,11 +123,18 @@ class Instance(proto.Message): MemcacheVersion. 
The full version format will be "memcached-1.5.16". - instance_messages (Sequence[google.cloud.memcache_v1.types.Instance.InstanceMessage]): - List of messages that describe current - statuses of memcached instance. + instance_messages (MutableSequence[google.cloud.memcache_v1.types.Instance.InstanceMessage]): + List of messages that describe the current + state of the Memcached instance. discovery_endpoint (str): - Output only. Endpoint for Discovery API + Output only. Endpoint for the Discovery API. + maintenance_policy (google.cloud.memcache_v1.types.MaintenancePolicy): + The maintenance policy for the instance. If + not provided, the maintenance event will be + performed based on Memorystore internal rollout + schedule. + maintenance_schedule (google.cloud.memcache_v1.types.MaintenanceSchedule): + Output only. Published maintenance schedule. """ class State(proto.Enum): @@ -122,6 +142,7 @@ class State(proto.Enum): STATE_UNSPECIFIED = 0 CREATING = 1 READY = 2 + UPDATING = 3 DELETING = 4 PERFORMING_MAINTENANCE = 5 @@ -136,11 +157,11 @@ class NodeConfig(proto.Message): Memcached node. 
""" - cpu_count = proto.Field( + cpu_count: int = proto.Field( proto.INT32, number=1, ) - memory_size_mb = proto.Field( + memory_size_mb: int = proto.Field( proto.INT32, number=2, ) @@ -179,28 +200,28 @@ class State(proto.Enum): DELETING = 3 UPDATING = 4 - node_id = proto.Field( + node_id: str = proto.Field( proto.STRING, number=1, ) - zone = proto.Field( + zone: str = proto.Field( proto.STRING, number=2, ) - state = proto.Field( + state: "Instance.Node.State" = proto.Field( proto.ENUM, number=3, enum="Instance.Node.State", ) - host = proto.Field( + host: str = proto.Field( proto.STRING, number=4, ) - port = proto.Field( + port: int = proto.Field( proto.INT32, number=5, ) - parameters = proto.Field( + parameters: "MemcacheParameters" = proto.Field( proto.MESSAGE, number=6, message="MemcacheParameters", @@ -223,89 +244,247 @@ class Code(proto.Enum): CODE_UNSPECIFIED = 0 ZONE_DISTRIBUTION_UNBALANCED = 1 - code = proto.Field( + code: "Instance.InstanceMessage.Code" = proto.Field( proto.ENUM, number=1, enum="Instance.InstanceMessage.Code", ) - message = proto.Field( + message: str = proto.Field( proto.STRING, number=2, ) - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) - display_name = proto.Field( + display_name: str = proto.Field( proto.STRING, number=2, ) - labels = proto.MapField( + labels: MutableMapping[str, str] = proto.MapField( proto.STRING, proto.STRING, number=3, ) - authorized_network = proto.Field( + authorized_network: str = proto.Field( proto.STRING, number=4, ) - zones = proto.RepeatedField( + zones: MutableSequence[str] = proto.RepeatedField( proto.STRING, number=5, ) - node_count = proto.Field( + node_count: int = proto.Field( proto.INT32, number=6, ) - node_config = proto.Field( + node_config: NodeConfig = proto.Field( proto.MESSAGE, number=7, message=NodeConfig, ) - memcache_version = proto.Field( + memcache_version: "MemcacheVersion" = proto.Field( proto.ENUM, number=9, enum="MemcacheVersion", ) - parameters = proto.Field( 
+ parameters: "MemcacheParameters" = proto.Field( proto.MESSAGE, number=11, message="MemcacheParameters", ) - memcache_nodes = proto.RepeatedField( + memcache_nodes: MutableSequence[Node] = proto.RepeatedField( proto.MESSAGE, number=12, message=Node, ) - create_time = proto.Field( + create_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=13, message=timestamp_pb2.Timestamp, ) - update_time = proto.Field( + update_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=14, message=timestamp_pb2.Timestamp, ) - state = proto.Field( + state: State = proto.Field( proto.ENUM, number=15, enum=State, ) - memcache_full_version = proto.Field( + memcache_full_version: str = proto.Field( proto.STRING, number=18, ) - instance_messages = proto.RepeatedField( + instance_messages: MutableSequence[InstanceMessage] = proto.RepeatedField( proto.MESSAGE, number=19, message=InstanceMessage, ) - discovery_endpoint = proto.Field( + discovery_endpoint: str = proto.Field( proto.STRING, number=20, ) + maintenance_policy: "MaintenancePolicy" = proto.Field( + proto.MESSAGE, + number=21, + message="MaintenancePolicy", + ) + maintenance_schedule: "MaintenanceSchedule" = proto.Field( + proto.MESSAGE, + number=22, + message="MaintenanceSchedule", + ) + + +class MaintenancePolicy(proto.Message): + r"""Maintenance policy per instance. + + Attributes: + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the policy was + created. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the policy was + updated. + description (str): + Description of what this policy is for. Create/Update + methods return INVALID_ARGUMENT if the length is greater + than 512. + weekly_maintenance_window (MutableSequence[google.cloud.memcache_v1.types.WeeklyMaintenanceWindow]): + Required. Maintenance window that is applied to resources + covered by this policy. Minimum 1. 
For the current version, + the maximum number of weekly_maintenance_windows is expected + to be one. + """ + + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + description: str = proto.Field( + proto.STRING, + number=3, + ) + weekly_maintenance_window: MutableSequence[ + "WeeklyMaintenanceWindow" + ] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message="WeeklyMaintenanceWindow", + ) + + +class WeeklyMaintenanceWindow(proto.Message): + r"""Time window specified for weekly operations. + + Attributes: + day (google.type.dayofweek_pb2.DayOfWeek): + Required. Allows to define schedule that runs + specified day of the week. + start_time (google.type.timeofday_pb2.TimeOfDay): + Required. Start time of the window in UTC. + duration (google.protobuf.duration_pb2.Duration): + Required. Duration of the time window. + """ + + day: dayofweek_pb2.DayOfWeek = proto.Field( + proto.ENUM, + number=1, + enum=dayofweek_pb2.DayOfWeek, + ) + start_time: timeofday_pb2.TimeOfDay = proto.Field( + proto.MESSAGE, + number=2, + message=timeofday_pb2.TimeOfDay, + ) + duration: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=3, + message=duration_pb2.Duration, + ) + + +class MaintenanceSchedule(proto.Message): + r"""Upcoming maintenance schedule. + + Attributes: + start_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The start time of any upcoming + scheduled maintenance for this instance. + end_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The end time of any upcoming + scheduled maintenance for this instance. + schedule_deadline_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The deadline that the + maintenance schedule start time can not go + beyond, including reschedule. 
+ """ + + start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + schedule_deadline_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + + +class RescheduleMaintenanceRequest(proto.Message): + r"""Request for + [RescheduleMaintenance][google.cloud.memcache.v1.CloudMemcache.RescheduleMaintenance]. + + Attributes: + instance (str): + Required. Memcache instance resource name using the form: + ``projects/{project_id}/locations/{location_id}/instances/{instance_id}`` + where ``location_id`` refers to a GCP region. + reschedule_type (google.cloud.memcache_v1.types.RescheduleMaintenanceRequest.RescheduleType): + Required. If reschedule type is SPECIFIC_TIME, must set up + schedule_time as well. + schedule_time (google.protobuf.timestamp_pb2.Timestamp): + Timestamp when the maintenance shall be rescheduled to if + reschedule_type=SPECIFIC_TIME, in RFC 3339 format, for + example ``2012-11-15T16:19:00.094Z``. + """ + + class RescheduleType(proto.Enum): + r"""Reschedule options.""" + RESCHEDULE_TYPE_UNSPECIFIED = 0 + IMMEDIATE = 1 + NEXT_AVAILABLE_WINDOW = 2 + SPECIFIC_TIME = 3 + + instance: str = proto.Field( + proto.STRING, + number=1, + ) + reschedule_type: RescheduleType = proto.Field( + proto.ENUM, + number=2, + enum=RescheduleType, + ) + schedule_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) class ListInstancesRequest(proto.Message): @@ -321,40 +500,40 @@ class ListInstancesRequest(proto.Message): The maximum number of items to return. If not specified, a default value of 1000 will be used by - the service. 
Regardless of the page_size value, the response - may include a partial list and a caller should only rely on - response's - [next_page_token][CloudMemcache.ListInstancesResponse.next_page_token] + the service. Regardless of the ``page_size`` value, the + response may include a partial list and a caller should only + rely on response's + [``next_page_token``][google.cloud.memcache.v1.ListInstancesResponse.next_page_token] to determine if there are more instances left to be queried. page_token (str): - The next_page_token value returned from a previous List + The ``next_page_token`` value returned from a previous List request, if any. filter (str): - List filter. For example, exclude all - Memcached instances with name as my-instance by - specifying "name != my-instance". + List filter. For example, exclude all Memcached instances + with name as my-instance by specifying + ``"name != my-instance"``. order_by (str): Sort results. Supported values are "name", "name desc" or "" (unsorted). """ - parent = proto.Field( + parent: str = proto.Field( proto.STRING, number=1, ) - page_size = proto.Field( + page_size: int = proto.Field( proto.INT32, number=2, ) - page_token = proto.Field( + page_token: str = proto.Field( proto.STRING, number=3, ) - filter = proto.Field( + filter: str = proto.Field( proto.STRING, number=4, ) - order_by = proto.Field( + order_by: str = proto.Field( proto.STRING, number=5, ) @@ -365,7 +544,7 @@ class ListInstancesResponse(proto.Message): [ListInstances][google.cloud.memcache.v1.CloudMemcache.ListInstances]. Attributes: - instances (Sequence[google.cloud.memcache_v1.types.Instance]): + instances (MutableSequence[google.cloud.memcache_v1.types.Instance]): A list of Memcached instances in the project in the specified location, or across all locations. @@ -376,7 +555,7 @@ class ListInstancesResponse(proto.Message): Token to retrieve the next page of results, or empty if there are no more results in the list. 
- unreachable (Sequence[str]): + unreachable (MutableSequence[str]): Locations that could not be reached. """ @@ -384,16 +563,16 @@ class ListInstancesResponse(proto.Message): def raw_page(self): return self - instances = proto.RepeatedField( + instances: MutableSequence["Instance"] = proto.RepeatedField( proto.MESSAGE, number=1, message="Instance", ) - next_page_token = proto.Field( + next_page_token: str = proto.Field( proto.STRING, number=2, ) - unreachable = proto.RepeatedField( + unreachable: MutableSequence[str] = proto.RepeatedField( proto.STRING, number=3, ) @@ -410,7 +589,7 @@ class GetInstanceRequest(proto.Message): where ``location_id`` refers to a GCP region """ - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) @@ -434,23 +613,23 @@ class CreateInstanceRequest(proto.Message): - Must start with a letter. - Must be between 1-40 characters. - Must end with a number or a letter. - - Must be unique within the user project / location + - Must be unique within the user project / location. - If any of the above are not met, will raise an invalid + If any of the above are not met, the API raises an invalid argument error. instance (google.cloud.memcache_v1.types.Instance): Required. A Memcached Instance """ - parent = proto.Field( + parent: str = proto.Field( proto.STRING, number=1, ) - instance_id = proto.Field( + instance_id: str = proto.Field( proto.STRING, number=2, ) - instance = proto.Field( + instance: "Instance" = proto.Field( proto.MESSAGE, number=3, message="Instance", @@ -471,12 +650,12 @@ class UpdateInstanceRequest(proto.Message): update_mask are updated. 
""" - update_mask = proto.Field( + update_mask: field_mask_pb2.FieldMask = proto.Field( proto.MESSAGE, number=1, message=field_mask_pb2.FieldMask, ) - instance = proto.Field( + instance: "Instance" = proto.Field( proto.MESSAGE, number=2, message="Instance", @@ -494,7 +673,7 @@ class DeleteInstanceRequest(proto.Message): where ``location_id`` refers to a GCP region """ - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) @@ -509,26 +688,25 @@ class ApplyParametersRequest(proto.Message): Required. Resource name of the Memcached instance for which parameter group updates should be applied. - node_ids (Sequence[str]): - Nodes to which we should apply the - instance-level parameter group. + node_ids (MutableSequence[str]): + Nodes to which the instance-level parameter + group is applied. apply_all (bool): - Whether to apply instance-level parameter - group to all nodes. If set to true, will - explicitly restrict users from specifying any - nodes, and apply parameter group updates to all - nodes within the instance. + Whether to apply instance-level parameter group to all + nodes. If set to true, users are restricted from specifying + individual nodes, and ``ApplyParameters`` updates all nodes + within the instance. """ - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) - node_ids = proto.RepeatedField( + node_ids: MutableSequence[str] = proto.RepeatedField( proto.STRING, number=2, ) - apply_all = proto.Field( + apply_all: bool = proto.Field( proto.BOOL, number=3, ) @@ -549,16 +727,16 @@ class UpdateParametersRequest(proto.Message): The parameters to apply to the instance. 
""" - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) - update_mask = proto.Field( + update_mask: field_mask_pb2.FieldMask = proto.Field( proto.MESSAGE, number=2, message=field_mask_pb2.FieldMask, ) - parameters = proto.Field( + parameters: "MemcacheParameters" = proto.Field( proto.MESSAGE, number=3, message="MemcacheParameters", @@ -574,18 +752,19 @@ class MemcacheParameters(proto.Message): this set of parameters. Users can use this id to determine if the parameters associated with the instance differ from the parameters associated - with the nodes and any action needs to be taken - to apply parameters on nodes. - params (Mapping[str, str]): + with the nodes. A discrepancy between parameter + ids can inform users that they may need to take + action to apply parameters on nodes. + params (MutableMapping[str, str]): User defined set of parameters to use in the memcached process. """ - id = proto.Field( + id: str = proto.Field( proto.STRING, number=1, ) - params = proto.MapField( + params: MutableMapping[str, str] = proto.MapField( proto.STRING, proto.STRING, number=3, @@ -622,36 +801,60 @@ class OperationMetadata(proto.Message): operation. 
""" - create_time = proto.Field( + create_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=1, message=timestamp_pb2.Timestamp, ) - end_time = proto.Field( + end_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=2, message=timestamp_pb2.Timestamp, ) - target = proto.Field( + target: str = proto.Field( proto.STRING, number=3, ) - verb = proto.Field( + verb: str = proto.Field( proto.STRING, number=4, ) - status_detail = proto.Field( + status_detail: str = proto.Field( proto.STRING, number=5, ) - cancel_requested = proto.Field( + cancel_requested: bool = proto.Field( proto.BOOL, number=6, ) - api_version = proto.Field( + api_version: str = proto.Field( proto.STRING, number=7, ) +class LocationMetadata(proto.Message): + r"""Metadata for the given + [google.cloud.location.Location][google.cloud.location.Location]. + + Attributes: + available_zones (MutableMapping[str, google.cloud.memcache_v1.types.ZoneMetadata]): + Output only. The set of available zones in the location. The + map is keyed by the lowercase ID of each zone, as defined by + GCE. These keys can be specified in the ``zones`` field when + creating a Memcached instance. + """ + + available_zones: MutableMapping[str, "ZoneMetadata"] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=1, + message="ZoneMetadata", + ) + + +class ZoneMetadata(proto.Message): + r""" """ + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/memcache_v1beta2/__init__.py b/google/cloud/memcache_v1beta2/__init__.py index 0efe11e..a655516 100644 --- a/google/cloud/memcache_v1beta2/__init__.py +++ b/google/cloud/memcache_v1beta2/__init__.py @@ -13,6 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +from google.cloud.memcache import gapic_version as package_version + +__version__ = package_version.__version__ + from .services.cloud_memcache import CloudMemcacheAsyncClient, CloudMemcacheClient from .types.cloud_memcache import ( @@ -25,11 +29,15 @@ ListInstancesRequest, ListInstancesResponse, LocationMetadata, + MaintenancePolicy, + MaintenanceSchedule, MemcacheParameters, MemcacheVersion, OperationMetadata, + RescheduleMaintenanceRequest, UpdateInstanceRequest, UpdateParametersRequest, + WeeklyMaintenanceWindow, ZoneMetadata, ) @@ -45,10 +53,14 @@ "ListInstancesRequest", "ListInstancesResponse", "LocationMetadata", + "MaintenancePolicy", + "MaintenanceSchedule", "MemcacheParameters", "MemcacheVersion", "OperationMetadata", + "RescheduleMaintenanceRequest", "UpdateInstanceRequest", "UpdateParametersRequest", + "WeeklyMaintenanceWindow", "ZoneMetadata", ) diff --git a/google/cloud/memcache_v1beta2/gapic_metadata.json b/google/cloud/memcache_v1beta2/gapic_metadata.json index 288ef89..4143466 100644 --- a/google/cloud/memcache_v1beta2/gapic_metadata.json +++ b/google/cloud/memcache_v1beta2/gapic_metadata.json @@ -40,6 +40,11 @@ "list_instances" ] }, + "RescheduleMaintenance": { + "methods": [ + "reschedule_maintenance" + ] + }, "UpdateInstance": { "methods": [ "update_instance" @@ -85,6 +90,11 @@ "list_instances" ] }, + "RescheduleMaintenance": { + "methods": [ + "reschedule_maintenance" + ] + }, "UpdateInstance": { "methods": [ "update_instance" diff --git a/google/cloud/memcache_v1beta2/gapic_version.py b/google/cloud/memcache_v1beta2/gapic_version.py new file mode 100644 index 0000000..997edc1 --- /dev/null +++ b/google/cloud/memcache_v1beta2/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "1.5.0" # {x-release-please-version} diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/async_client.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/async_client.py index 0c5c5cb..f3f032f 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/async_client.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/async_client.py @@ -16,7 +16,17 @@ from collections import OrderedDict import functools import re -from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 @@ -24,7 +34,8 @@ from google.api_core.client_options import ClientOptions from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore -import pkg_resources + +from google.cloud.memcache_v1beta2 import gapic_version as package_version try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] @@ -33,6 +44,8 @@ from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore @@ -184,9 +197,9 @@ def 
transport(self) -> CloudMemcacheTransport: def __init__( self, *, - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, transport: Union[str, CloudMemcacheTransport] = "grpc_asyncio", - client_options: ClientOptions = None, + client_options: Optional[ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiates the cloud memcache client. @@ -230,11 +243,11 @@ def __init__( async def list_instances( self, - request: Union[cloud_memcache.ListInstancesRequest, dict] = None, + request: Optional[Union[cloud_memcache.ListInstancesRequest, dict]] = None, *, - parent: str = None, + parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListInstancesAsyncPager: r"""Lists Instances in a given location. @@ -267,7 +280,7 @@ async def sample_list_instances(): print(response) Args: - request (Union[google.cloud.memcache_v1beta2.types.ListInstancesRequest, dict]): + request (Optional[Union[google.cloud.memcache_v1beta2.types.ListInstancesRequest, dict]]): The request object. Request for [ListInstances][google.cloud.memcache.v1beta2.CloudMemcache.ListInstances]. parent (:class:`str`): @@ -347,11 +360,11 @@ async def sample_list_instances(): async def get_instance( self, - request: Union[cloud_memcache.GetInstanceRequest, dict] = None, + request: Optional[Union[cloud_memcache.GetInstanceRequest, dict]] = None, *, - name: str = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> cloud_memcache.Instance: r"""Gets details of a single Instance. 
@@ -383,7 +396,7 @@ async def sample_get_instance(): print(response) Args: - request (Union[google.cloud.memcache_v1beta2.types.GetInstanceRequest, dict]): + request (Optional[Union[google.cloud.memcache_v1beta2.types.GetInstanceRequest, dict]]): The request object. Request for [GetInstance][google.cloud.memcache.v1beta2.CloudMemcache.GetInstance]. name (:class:`str`): @@ -449,13 +462,13 @@ async def sample_get_instance(): async def create_instance( self, - request: Union[cloud_memcache.CreateInstanceRequest, dict] = None, + request: Optional[Union[cloud_memcache.CreateInstanceRequest, dict]] = None, *, - parent: str = None, - instance_id: str = None, - resource: cloud_memcache.Instance = None, + parent: Optional[str] = None, + instance_id: Optional[str] = None, + resource: Optional[cloud_memcache.Instance] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: r"""Creates a new Instance in a given location. @@ -493,13 +506,13 @@ async def sample_create_instance(): print("Waiting for operation to complete...") - response = await operation.result() + response = (await operation).result() # Handle the response print(response) Args: - request (Union[google.cloud.memcache_v1beta2.types.CreateInstanceRequest, dict]): + request (Optional[Union[google.cloud.memcache_v1beta2.types.CreateInstanceRequest, dict]]): The request object. Request for [CreateInstance][google.cloud.memcache.v1beta2.CloudMemcache.CreateInstance]. 
parent (:class:`str`): @@ -604,12 +617,12 @@ async def sample_create_instance(): async def update_instance( self, - request: Union[cloud_memcache.UpdateInstanceRequest, dict] = None, + request: Optional[Union[cloud_memcache.UpdateInstanceRequest, dict]] = None, *, - update_mask: field_mask_pb2.FieldMask = None, - resource: cloud_memcache.Instance = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + resource: Optional[cloud_memcache.Instance] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: r"""Updates an existing Instance in a given project and @@ -646,13 +659,13 @@ async def sample_update_instance(): print("Waiting for operation to complete...") - response = await operation.result() + response = (await operation).result() # Handle the response print(response) Args: - request (Union[google.cloud.memcache_v1beta2.types.UpdateInstanceRequest, dict]): + request (Optional[Union[google.cloud.memcache_v1beta2.types.UpdateInstanceRequest, dict]]): The request object. Request for [UpdateInstance][google.cloud.memcache.v1beta2.CloudMemcache.UpdateInstance]. 
update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): @@ -741,13 +754,13 @@ async def sample_update_instance(): async def update_parameters( self, - request: Union[cloud_memcache.UpdateParametersRequest, dict] = None, + request: Optional[Union[cloud_memcache.UpdateParametersRequest, dict]] = None, *, - name: str = None, - update_mask: field_mask_pb2.FieldMask = None, - parameters: cloud_memcache.MemcacheParameters = None, + name: Optional[str] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + parameters: Optional[cloud_memcache.MemcacheParameters] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: r"""Updates the defined Memcached parameters for an existing @@ -780,13 +793,13 @@ async def sample_update_parameters(): print("Waiting for operation to complete...") - response = await operation.result() + response = (await operation).result() # Handle the response print(response) Args: - request (Union[google.cloud.memcache_v1beta2.types.UpdateParametersRequest, dict]): + request (Optional[Union[google.cloud.memcache_v1beta2.types.UpdateParametersRequest, dict]]): The request object. Request for [UpdateParameters][google.cloud.memcache.v1beta2.CloudMemcache.UpdateParameters]. name (:class:`str`): @@ -880,11 +893,11 @@ async def sample_update_parameters(): async def delete_instance( self, - request: Union[cloud_memcache.DeleteInstanceRequest, dict] = None, + request: Optional[Union[cloud_memcache.DeleteInstanceRequest, dict]] = None, *, - name: str = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: r"""Deletes a single Instance. 
@@ -914,13 +927,13 @@ async def sample_delete_instance(): print("Waiting for operation to complete...") - response = await operation.result() + response = (await operation).result() # Handle the response print(response) Args: - request (Union[google.cloud.memcache_v1beta2.types.DeleteInstanceRequest, dict]): + request (Optional[Union[google.cloud.memcache_v1beta2.types.DeleteInstanceRequest, dict]]): The request object. Request for [DeleteInstance][google.cloud.memcache.v1beta2.CloudMemcache.DeleteInstance]. name (:class:`str`): @@ -1006,13 +1019,13 @@ async def sample_delete_instance(): async def apply_parameters( self, - request: Union[cloud_memcache.ApplyParametersRequest, dict] = None, + request: Optional[Union[cloud_memcache.ApplyParametersRequest, dict]] = None, *, - name: str = None, - node_ids: Sequence[str] = None, - apply_all: bool = None, + name: Optional[str] = None, + node_ids: Optional[MutableSequence[str]] = None, + apply_all: Optional[bool] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: r"""``ApplyParameters`` restarts the set of specified nodes in order @@ -1044,13 +1057,13 @@ async def sample_apply_parameters(): print("Waiting for operation to complete...") - response = await operation.result() + response = (await operation).result() # Handle the response print(response) Args: - request (Union[google.cloud.memcache_v1beta2.types.ApplyParametersRequest, dict]): + request (Optional[Union[google.cloud.memcache_v1beta2.types.ApplyParametersRequest, dict]]): The request object. Request for [ApplyParameters][google.cloud.memcache.v1beta2.CloudMemcache.ApplyParameters]. name (:class:`str`): @@ -1061,7 +1074,7 @@ async def sample_apply_parameters(): This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. 
- node_ids (:class:`Sequence[str]`): + node_ids (:class:`MutableSequence[str]`): Nodes to which the instance-level parameter group is applied. @@ -1148,13 +1161,15 @@ async def sample_apply_parameters(): async def apply_software_update( self, - request: Union[cloud_memcache.ApplySoftwareUpdateRequest, dict] = None, + request: Optional[ + Union[cloud_memcache.ApplySoftwareUpdateRequest, dict] + ] = None, *, - instance: str = None, - node_ids: Sequence[str] = None, - apply_all: bool = None, + instance: Optional[str] = None, + node_ids: Optional[MutableSequence[str]] = None, + apply_all: Optional[bool] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: r"""Updates software on the selected nodes of the @@ -1185,13 +1200,13 @@ async def sample_apply_software_update(): print("Waiting for operation to complete...") - response = await operation.result() + response = (await operation).result() # Handle the response print(response) Args: - request (Union[google.cloud.memcache_v1beta2.types.ApplySoftwareUpdateRequest, dict]): + request (Optional[Union[google.cloud.memcache_v1beta2.types.ApplySoftwareUpdateRequest, dict]]): The request object. Request for [ApplySoftwareUpdate][google.cloud.memcache.v1beta2.CloudMemcache.ApplySoftwareUpdate]. instance (:class:`str`): @@ -1202,7 +1217,7 @@ async def sample_apply_software_update(): This corresponds to the ``instance`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - node_ids (:class:`Sequence[str]`): + node_ids (:class:`MutableSequence[str]`): Nodes to which we should apply the update to. Note all the selected nodes are updated in parallel. @@ -1290,6 +1305,477 @@ async def sample_apply_software_update(): # Done; return the response. 
return response + async def reschedule_maintenance( + self, + request: Optional[ + Union[cloud_memcache.RescheduleMaintenanceRequest, dict] + ] = None, + *, + instance: Optional[str] = None, + reschedule_type: Optional[ + cloud_memcache.RescheduleMaintenanceRequest.RescheduleType + ] = None, + schedule_time: Optional[timestamp_pb2.Timestamp] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Performs the apply phase of the RescheduleMaintenance + verb. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import memcache_v1beta2 + + async def sample_reschedule_maintenance(): + # Create a client + client = memcache_v1beta2.CloudMemcacheAsyncClient() + + # Initialize request argument(s) + request = memcache_v1beta2.RescheduleMaintenanceRequest( + instance="instance_value", + reschedule_type="SPECIFIC_TIME", + ) + + # Make the request + operation = client.reschedule_maintenance(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.memcache_v1beta2.types.RescheduleMaintenanceRequest, dict]]): + The request object. Request for + [RescheduleMaintenance][google.cloud.memcache.v1beta2.CloudMemcache.RescheduleMaintenance]. + instance (:class:`str`): + Required. 
Memcache instance resource name using the + form: + ``projects/{project_id}/locations/{location_id}/instances/{instance_id}`` + where ``location_id`` refers to a GCP region. + + This corresponds to the ``instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + reschedule_type (:class:`google.cloud.memcache_v1beta2.types.RescheduleMaintenanceRequest.RescheduleType`): + Required. If reschedule type is SPECIFIC_TIME, must set + up schedule_time as well. + + This corresponds to the ``reschedule_type`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + schedule_time (:class:`google.protobuf.timestamp_pb2.Timestamp`): + Timestamp when the maintenance shall be rescheduled to + if reschedule_type=SPECIFIC_TIME, in RFC 3339 format, + for example ``2012-11-15T16:19:00.094Z``. + + This corresponds to the ``schedule_time`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.memcache_v1beta2.types.Instance` A + Memorystore for Memcached instance + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([instance, reschedule_type, schedule_time]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + request = cloud_memcache.RescheduleMaintenanceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if instance is not None: + request.instance = instance + if reschedule_type is not None: + request.reschedule_type = reschedule_type + if schedule_time is not None: + request.schedule_time = schedule_time + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.reschedule_maintenance, + default_timeout=1200.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("instance", request.instance),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + cloud_memcache.Instance, + metadata_type=cloud_memcache.OperationMetadata, + ) + + # Done; return the response. + return response + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. 
+ # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
+ if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.delete_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.get_location, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.list_locations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + async def __aenter__(self): return self @@ -1297,14 +1783,9 @@ async def __aexit__(self, exc_type, exc, tb): await self.transport.close() -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-memcache", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) __all__ = ("CloudMemcacheAsyncClient",) diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py index 83d9691..4bea8ce 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py @@ -16,7 +16,18 @@ from collections import OrderedDict import os import re -from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) from google.api_core import client_options as client_options_lib from google.api_core import exceptions as core_exceptions @@ -27,7 +38,8 @@ from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.oauth2 import service_account # type: ignore -import pkg_resources + +from google.cloud.memcache_v1beta2 import gapic_version as package_version try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] @@ -36,6 +48,8 @@ from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: 
ignore from google.protobuf import timestamp_pb2 # type: ignore @@ -62,7 +76,7 @@ class CloudMemcacheClientMeta(type): def get_transport_class( cls, - label: str = None, + label: Optional[str] = None, ) -> Type[CloudMemcacheTransport]: """Returns an appropriate transport class. @@ -356,8 +370,8 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, CloudMemcacheTransport, None] = None, - client_options: Optional[client_options_lib.ClientOptions] = None, + transport: Optional[Union[str, CloudMemcacheTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiates the cloud memcache client. @@ -371,7 +385,7 @@ def __init__( transport (Union[str, CloudMemcacheTransport]): The transport to use. If set to None, a transport is chosen automatically. - client_options (google.api_core.client_options.ClientOptions): Custom options for the + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the client. It won't take effect if a ``transport`` instance is provided. (1) The ``api_endpoint`` property can be used to override the default endpoint provided by the client. 
GOOGLE_API_USE_MTLS_ENDPOINT @@ -401,6 +415,7 @@ def __init__( client_options = client_options_lib.from_dict(client_options) if client_options is None: client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( client_options @@ -453,11 +468,11 @@ def __init__( def list_instances( self, - request: Union[cloud_memcache.ListInstancesRequest, dict] = None, + request: Optional[Union[cloud_memcache.ListInstancesRequest, dict]] = None, *, - parent: str = None, + parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListInstancesPager: r"""Lists Instances in a given location. @@ -570,11 +585,11 @@ def sample_list_instances(): def get_instance( self, - request: Union[cloud_memcache.GetInstanceRequest, dict] = None, + request: Optional[Union[cloud_memcache.GetInstanceRequest, dict]] = None, *, - name: str = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> cloud_memcache.Instance: r"""Gets details of a single Instance. 
@@ -672,13 +687,13 @@ def sample_get_instance(): def create_instance( self, - request: Union[cloud_memcache.CreateInstanceRequest, dict] = None, + request: Optional[Union[cloud_memcache.CreateInstanceRequest, dict]] = None, *, - parent: str = None, - instance_id: str = None, - resource: cloud_memcache.Instance = None, + parent: Optional[str] = None, + instance_id: Optional[str] = None, + resource: Optional[cloud_memcache.Instance] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: r"""Creates a new Instance in a given location. @@ -827,12 +842,12 @@ def sample_create_instance(): def update_instance( self, - request: Union[cloud_memcache.UpdateInstanceRequest, dict] = None, + request: Optional[Union[cloud_memcache.UpdateInstanceRequest, dict]] = None, *, - update_mask: field_mask_pb2.FieldMask = None, - resource: cloud_memcache.Instance = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + resource: Optional[cloud_memcache.Instance] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: r"""Updates an existing Instance in a given project and @@ -964,13 +979,13 @@ def sample_update_instance(): def update_parameters( self, - request: Union[cloud_memcache.UpdateParametersRequest, dict] = None, + request: Optional[Union[cloud_memcache.UpdateParametersRequest, dict]] = None, *, - name: str = None, - update_mask: field_mask_pb2.FieldMask = None, - parameters: cloud_memcache.MemcacheParameters = None, + name: Optional[str] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + parameters: Optional[cloud_memcache.MemcacheParameters] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] 
= gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: r"""Updates the defined Memcached parameters for an existing @@ -1103,11 +1118,11 @@ def sample_update_parameters(): def delete_instance( self, - request: Union[cloud_memcache.DeleteInstanceRequest, dict] = None, + request: Optional[Union[cloud_memcache.DeleteInstanceRequest, dict]] = None, *, - name: str = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: r"""Deletes a single Instance. @@ -1229,13 +1244,13 @@ def sample_delete_instance(): def apply_parameters( self, - request: Union[cloud_memcache.ApplyParametersRequest, dict] = None, + request: Optional[Union[cloud_memcache.ApplyParametersRequest, dict]] = None, *, - name: str = None, - node_ids: Sequence[str] = None, - apply_all: bool = None, + name: Optional[str] = None, + node_ids: Optional[MutableSequence[str]] = None, + apply_all: Optional[bool] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: r"""``ApplyParameters`` restarts the set of specified nodes in order @@ -1284,7 +1299,7 @@ def sample_apply_parameters(): This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - node_ids (Sequence[str]): + node_ids (MutableSequence[str]): Nodes to which the instance-level parameter group is applied. 
@@ -1371,13 +1386,15 @@ def sample_apply_parameters(): def apply_software_update( self, - request: Union[cloud_memcache.ApplySoftwareUpdateRequest, dict] = None, + request: Optional[ + Union[cloud_memcache.ApplySoftwareUpdateRequest, dict] + ] = None, *, - instance: str = None, - node_ids: Sequence[str] = None, - apply_all: bool = None, + instance: Optional[str] = None, + node_ids: Optional[MutableSequence[str]] = None, + apply_all: Optional[bool] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: r"""Updates software on the selected nodes of the @@ -1425,7 +1442,7 @@ def sample_apply_software_update(): This corresponds to the ``instance`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - node_ids (Sequence[str]): + node_ids (MutableSequence[str]): Nodes to which we should apply the update to. Note all the selected nodes are updated in parallel. @@ -1513,6 +1530,152 @@ def sample_apply_software_update(): # Done; return the response. return response + def reschedule_maintenance( + self, + request: Optional[ + Union[cloud_memcache.RescheduleMaintenanceRequest, dict] + ] = None, + *, + instance: Optional[str] = None, + reschedule_type: Optional[ + cloud_memcache.RescheduleMaintenanceRequest.RescheduleType + ] = None, + schedule_time: Optional[timestamp_pb2.Timestamp] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Performs the apply phase of the RescheduleMaintenance + verb. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import memcache_v1beta2 + + def sample_reschedule_maintenance(): + # Create a client + client = memcache_v1beta2.CloudMemcacheClient() + + # Initialize request argument(s) + request = memcache_v1beta2.RescheduleMaintenanceRequest( + instance="instance_value", + reschedule_type="SPECIFIC_TIME", + ) + + # Make the request + operation = client.reschedule_maintenance(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.memcache_v1beta2.types.RescheduleMaintenanceRequest, dict]): + The request object. Request for + [RescheduleMaintenance][google.cloud.memcache.v1beta2.CloudMemcache.RescheduleMaintenance]. + instance (str): + Required. Memcache instance resource name using the + form: + ``projects/{project_id}/locations/{location_id}/instances/{instance_id}`` + where ``location_id`` refers to a GCP region. + + This corresponds to the ``instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + reschedule_type (google.cloud.memcache_v1beta2.types.RescheduleMaintenanceRequest.RescheduleType): + Required. If reschedule type is SPECIFIC_TIME, must set + up schedule_time as well. + + This corresponds to the ``reschedule_type`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + schedule_time (google.protobuf.timestamp_pb2.Timestamp): + Timestamp when the maintenance shall be rescheduled to + if reschedule_type=SPECIFIC_TIME, in RFC 3339 format, + for example ``2012-11-15T16:19:00.094Z``. + + This corresponds to the ``schedule_time`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.memcache_v1beta2.types.Instance` A + Memorystore for Memcached instance + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([instance, reschedule_type, schedule_time]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloud_memcache.RescheduleMaintenanceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloud_memcache.RescheduleMaintenanceRequest): + request = cloud_memcache.RescheduleMaintenanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if instance is not None: + request.instance = instance + if reschedule_type is not None: + request.reschedule_type = reschedule_type + if schedule_time is not None: + request.schedule_time = schedule_time + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.reschedule_maintenance] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("instance", request.instance),)), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + cloud_memcache.Instance, + metadata_type=cloud_memcache.OperationMetadata, + ) + + # Done; return the response. + return response + def __enter__(self): return self @@ -1526,15 +1689,335 @@ def __exit__(self, type, value, traceback): """ self.transport.close() + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-memcache", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method.wrap_method( + self._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.delete_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
+ rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
+ rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_location, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_locations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) __all__ = ("CloudMemcacheClient",) diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/base.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/base.py index a6e6a4a..828089f 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/base.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/base.py @@ -22,20 +22,16 @@ from google.api_core import retry as retries import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account # type: ignore -import pkg_resources +from google.cloud.memcache_v1beta2 import gapic_version as package_version from google.cloud.memcache_v1beta2.types import cloud_memcache -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-memcache", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) class CloudMemcacheTransport(abc.ABC): @@ -49,7 +45,7 @@ def __init__( self, *, host: str = DEFAULT_HOST, - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -168,6 +164,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=1200.0, client_info=client_info, ), + self.reschedule_maintenance: gapic_v1.method.wrap_method( + self.reschedule_maintenance, + default_timeout=1200.0, + client_info=client_info, + ), } def 
close(self): @@ -259,6 +260,69 @@ def apply_software_update( ]: raise NotImplementedError() + @property + def reschedule_maintenance( + self, + ) -> Callable[ + [cloud_memcache.RescheduleMaintenanceRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]: + raise NotImplementedError() + + @property + def get_location( + self, + ) -> Callable[ + [locations_pb2.GetLocationRequest], + Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], + ]: + raise NotImplementedError() + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], + Union[ + locations_pb2.ListLocationsResponse, + Awaitable[locations_pb2.ListLocationsResponse], + ], + ]: + raise NotImplementedError() + @property def kind(self) -> str: raise NotImplementedError() diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc.py index 4be1255..5035a2e 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc.py @@ -20,6 +20,7 @@ import google.auth # type: ignore from google.auth import credentials as 
ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore import grpc # type: ignore @@ -65,14 +66,14 @@ def __init__( self, *, host: str = "memcache.googleapis.com", - credentials: ga_credentials.Credentials = None, - credentials_file: str = None, - scopes: Sequence[str] = None, - channel: grpc.Channel = None, - api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - ssl_channel_credentials: grpc.ChannelCredentials = None, - client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, @@ -200,8 +201,8 @@ def __init__( def create_channel( cls, host: str = "memcache.googleapis.com", - credentials: ga_credentials.Credentials = None, - credentials_file: str = None, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, **kwargs, @@ -481,9 +482,144 @@ def apply_software_update( ) return self._stubs["apply_software_update"] + @property + def reschedule_maintenance( + self, + ) -> Callable[ + [cloud_memcache.RescheduleMaintenanceRequest], operations_pb2.Operation + ]: + r"""Return a callable for the reschedule maintenance method over 
gRPC. + + Performs the apply phase of the RescheduleMaintenance + verb. + + Returns: + Callable[[~.RescheduleMaintenanceRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "reschedule_maintenance" not in self._stubs: + self._stubs["reschedule_maintenance"] = self.grpc_channel.unary_unary( + "/google.cloud.memcache.v1beta2.CloudMemcache/RescheduleMaintenance", + request_serializer=cloud_memcache.RescheduleMaintenanceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["reschedule_maintenance"] + def close(self): self.grpc_channel.close() + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse + ]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_location" not in self._stubs: + self._stubs["get_location"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + @property def kind(self) -> str: return "grpc" diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc_asyncio.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc_asyncio.py index d0310f8..68085b2 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc_asyncio.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc_asyncio.py @@ -19,6 +19,7 @@ from google.api_core import gapic_v1, grpc_helpers_async, operations_v1 from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore import grpc # type: ignore from grpc.experimental import aio # type: ignore @@ -67,7 +68,7 @@ class CloudMemcacheGrpcAsyncIOTransport(CloudMemcacheTransport): def create_channel( cls, host: str = "memcache.googleapis.com", - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -110,15 +111,15 @@ def __init__( self, *, host: str = "memcache.googleapis.com", - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, - channel: aio.Channel = None, - api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - ssl_channel_credentials: 
grpc.ChannelCredentials = None, - client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, - quota_project_id=None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, api_audience: Optional[str] = None, @@ -499,8 +500,144 @@ def apply_software_update( ) return self._stubs["apply_software_update"] + @property + def reschedule_maintenance( + self, + ) -> Callable[ + [cloud_memcache.RescheduleMaintenanceRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the reschedule maintenance method over gRPC. + + Performs the apply phase of the RescheduleMaintenance + verb. + + Returns: + Callable[[~.RescheduleMaintenanceRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "reschedule_maintenance" not in self._stubs: + self._stubs["reschedule_maintenance"] = self.grpc_channel.unary_unary( + "/google.cloud.memcache.v1beta2.CloudMemcache/RescheduleMaintenance", + request_serializer=cloud_memcache.RescheduleMaintenanceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["reschedule_maintenance"] + def close(self): return self.grpc_channel.close() + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse + ]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_location" not in self._stubs: + self._stubs["get_location"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + __all__ = ("CloudMemcacheGrpcAsyncIOTransport",) diff --git a/google/cloud/memcache_v1beta2/types/__init__.py b/google/cloud/memcache_v1beta2/types/__init__.py index 073427d..5605887 100644 --- a/google/cloud/memcache_v1beta2/types/__init__.py +++ b/google/cloud/memcache_v1beta2/types/__init__.py @@ -23,11 +23,15 @@ ListInstancesRequest, ListInstancesResponse, LocationMetadata, + MaintenancePolicy, + MaintenanceSchedule, MemcacheParameters, MemcacheVersion, OperationMetadata, + RescheduleMaintenanceRequest, UpdateInstanceRequest, UpdateParametersRequest, + WeeklyMaintenanceWindow, ZoneMetadata, ) @@ -41,10 +45,14 @@ "ListInstancesRequest", "ListInstancesResponse", "LocationMetadata", + "MaintenancePolicy", + "MaintenanceSchedule", "MemcacheParameters", "OperationMetadata", + "RescheduleMaintenanceRequest", "UpdateInstanceRequest", "UpdateParametersRequest", + "WeeklyMaintenanceWindow", "ZoneMetadata", "MemcacheVersion", ) diff --git a/google/cloud/memcache_v1beta2/types/cloud_memcache.py b/google/cloud/memcache_v1beta2/types/cloud_memcache.py index b4b5c8c..6cd3f19 100644 --- a/google/cloud/memcache_v1beta2/types/cloud_memcache.py +++ b/google/cloud/memcache_v1beta2/types/cloud_memcache.py @@ -13,8 +13,13 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +from typing import MutableMapping, MutableSequence + +from google.protobuf import duration_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore +from google.type import dayofweek_pb2 # type: ignore +from google.type import timeofday_pb2 # type: ignore import proto # type: ignore __protobuf__ = proto.module( @@ -22,12 +27,16 @@ manifest={ "MemcacheVersion", "Instance", + "MaintenancePolicy", + "WeeklyMaintenanceWindow", + "MaintenanceSchedule", "ListInstancesRequest", "ListInstancesResponse", "GetInstanceRequest", "CreateInstanceRequest", "UpdateInstanceRequest", "DeleteInstanceRequest", + "RescheduleMaintenanceRequest", "ApplyParametersRequest", "UpdateParametersRequest", "ApplySoftwareUpdateRequest", @@ -65,7 +74,7 @@ class Instance(proto.Message): User provided name for the instance, which is only used for display purposes. Cannot be more than 80 characters. - labels (Mapping[str, str]): + labels (MutableMapping[str, str]): Resource labels to represent user-provided metadata. Refer to cloud documentation on labels for more details. @@ -75,7 +84,7 @@ class Instance(proto.Message): `network `__ to which the instance is connected. If left unspecified, the ``default`` network will be used. - zones (Sequence[str]): + zones (MutableSequence[str]): Zones in which Memcached nodes should be provisioned. Memcached nodes will be equally distributed across these zones. If not provided, @@ -93,9 +102,9 @@ class Instance(proto.Message): version will be automatically determined by our system based on the latest supported minor version. parameters (google.cloud.memcache_v1beta2.types.MemcacheParameters): - Optional: User defined parameters to apply to - the memcached process on each node. - memcache_nodes (Sequence[google.cloud.memcache_v1beta2.types.Instance.Node]): + User defined parameters to apply to the + memcached process on each node. 
+ memcache_nodes (MutableSequence[google.cloud.memcache_v1beta2.types.Instance.Node]): Output only. List of Memcached nodes. Refer to [Node][google.cloud.memcache.v1beta2.Instance.Node] message for more details. @@ -116,7 +125,7 @@ class Instance(proto.Message): MemcacheVersion. The full version format will be "memcached-1.5.16". - instance_messages (Sequence[google.cloud.memcache_v1beta2.types.Instance.InstanceMessage]): + instance_messages (MutableSequence[google.cloud.memcache_v1beta2.types.Instance.InstanceMessage]): List of messages that describe the current state of the Memcached instance. discovery_endpoint (str): @@ -124,6 +133,13 @@ class Instance(proto.Message): update_available (bool): Output only. Returns true if there is an update waiting to be applied + maintenance_policy (google.cloud.memcache_v1beta2.types.MaintenancePolicy): + The maintenance policy for the instance. If + not provided, the maintenance event will be + performed based on Memorystore internal rollout + schedule. + maintenance_schedule (google.cloud.memcache_v1beta2.types.MaintenanceSchedule): + Output only. Published maintenance schedule. """ class State(proto.Enum): @@ -131,6 +147,7 @@ class State(proto.Enum): STATE_UNSPECIFIED = 0 CREATING = 1 READY = 2 + UPDATING = 3 DELETING = 4 PERFORMING_MAINTENANCE = 5 @@ -145,11 +162,11 @@ class NodeConfig(proto.Message): Memcached node. 
""" - cpu_count = proto.Field( + cpu_count: int = proto.Field( proto.INT32, number=1, ) - memory_size_mb = proto.Field( + memory_size_mb: int = proto.Field( proto.INT32, number=2, ) @@ -191,33 +208,33 @@ class State(proto.Enum): DELETING = 3 UPDATING = 4 - node_id = proto.Field( + node_id: str = proto.Field( proto.STRING, number=1, ) - zone = proto.Field( + zone: str = proto.Field( proto.STRING, number=2, ) - state = proto.Field( + state: "Instance.Node.State" = proto.Field( proto.ENUM, number=3, enum="Instance.Node.State", ) - host = proto.Field( + host: str = proto.Field( proto.STRING, number=4, ) - port = proto.Field( + port: int = proto.Field( proto.INT32, number=5, ) - parameters = proto.Field( + parameters: "MemcacheParameters" = proto.Field( proto.MESSAGE, number=6, message="MemcacheParameters", ) - update_available = proto.Field( + update_available: bool = proto.Field( proto.BOOL, number=7, ) @@ -239,93 +256,210 @@ class Code(proto.Enum): CODE_UNSPECIFIED = 0 ZONE_DISTRIBUTION_UNBALANCED = 1 - code = proto.Field( + code: "Instance.InstanceMessage.Code" = proto.Field( proto.ENUM, number=1, enum="Instance.InstanceMessage.Code", ) - message = proto.Field( + message: str = proto.Field( proto.STRING, number=2, ) - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) - display_name = proto.Field( + display_name: str = proto.Field( proto.STRING, number=2, ) - labels = proto.MapField( + labels: MutableMapping[str, str] = proto.MapField( proto.STRING, proto.STRING, number=3, ) - authorized_network = proto.Field( + authorized_network: str = proto.Field( proto.STRING, number=4, ) - zones = proto.RepeatedField( + zones: MutableSequence[str] = proto.RepeatedField( proto.STRING, number=5, ) - node_count = proto.Field( + node_count: int = proto.Field( proto.INT32, number=6, ) - node_config = proto.Field( + node_config: NodeConfig = proto.Field( proto.MESSAGE, number=7, message=NodeConfig, ) - memcache_version = proto.Field( + memcache_version: 
"MemcacheVersion" = proto.Field( proto.ENUM, number=9, enum="MemcacheVersion", ) - parameters = proto.Field( + parameters: "MemcacheParameters" = proto.Field( proto.MESSAGE, number=11, message="MemcacheParameters", ) - memcache_nodes = proto.RepeatedField( + memcache_nodes: MutableSequence[Node] = proto.RepeatedField( proto.MESSAGE, number=12, message=Node, ) - create_time = proto.Field( + create_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=13, message=timestamp_pb2.Timestamp, ) - update_time = proto.Field( + update_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=14, message=timestamp_pb2.Timestamp, ) - state = proto.Field( + state: State = proto.Field( proto.ENUM, number=15, enum=State, ) - memcache_full_version = proto.Field( + memcache_full_version: str = proto.Field( proto.STRING, number=18, ) - instance_messages = proto.RepeatedField( + instance_messages: MutableSequence[InstanceMessage] = proto.RepeatedField( proto.MESSAGE, number=19, message=InstanceMessage, ) - discovery_endpoint = proto.Field( + discovery_endpoint: str = proto.Field( proto.STRING, number=20, ) - update_available = proto.Field( + update_available: bool = proto.Field( proto.BOOL, number=21, ) + maintenance_policy: "MaintenancePolicy" = proto.Field( + proto.MESSAGE, + number=22, + message="MaintenancePolicy", + ) + maintenance_schedule: "MaintenanceSchedule" = proto.Field( + proto.MESSAGE, + number=23, + message="MaintenanceSchedule", + ) + + +class MaintenancePolicy(proto.Message): + r"""Maintenance policy per instance. + + Attributes: + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the policy was + created. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the policy was + updated. + description (str): + Description of what this policy is for. Create/Update + methods return INVALID_ARGUMENT if the length is greater + than 512. 
+ weekly_maintenance_window (MutableSequence[google.cloud.memcache_v1beta2.types.WeeklyMaintenanceWindow]): + Required. Maintenance window that is applied to resources + covered by this policy. Minimum 1. For the current version, + the maximum number of weekly_maintenance_windows is expected + to be one. + """ + + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + description: str = proto.Field( + proto.STRING, + number=3, + ) + weekly_maintenance_window: MutableSequence[ + "WeeklyMaintenanceWindow" + ] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message="WeeklyMaintenanceWindow", + ) + + +class WeeklyMaintenanceWindow(proto.Message): + r"""Time window specified for weekly operations. + + Attributes: + day (google.type.dayofweek_pb2.DayOfWeek): + Required. Allows to define schedule that runs + specified day of the week. + start_time (google.type.timeofday_pb2.TimeOfDay): + Required. Start time of the window in UTC. + duration (google.protobuf.duration_pb2.Duration): + Required. Duration of the time window. + """ + + day: dayofweek_pb2.DayOfWeek = proto.Field( + proto.ENUM, + number=1, + enum=dayofweek_pb2.DayOfWeek, + ) + start_time: timeofday_pb2.TimeOfDay = proto.Field( + proto.MESSAGE, + number=2, + message=timeofday_pb2.TimeOfDay, + ) + duration: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=3, + message=duration_pb2.Duration, + ) + + +class MaintenanceSchedule(proto.Message): + r"""Upcoming maintenance schedule. + + Attributes: + start_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The start time of any upcoming + scheduled maintenance for this instance. + end_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The end time of any upcoming + scheduled maintenance for this instance. 
+ schedule_deadline_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The deadline that the + maintenance schedule start time can not go + beyond, including reschedule. + """ + + start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + schedule_deadline_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) class ListInstancesRequest(proto.Message): @@ -358,23 +492,23 @@ class ListInstancesRequest(proto.Message): "name desc" or "" (unsorted). """ - parent = proto.Field( + parent: str = proto.Field( proto.STRING, number=1, ) - page_size = proto.Field( + page_size: int = proto.Field( proto.INT32, number=2, ) - page_token = proto.Field( + page_token: str = proto.Field( proto.STRING, number=3, ) - filter = proto.Field( + filter: str = proto.Field( proto.STRING, number=4, ) - order_by = proto.Field( + order_by: str = proto.Field( proto.STRING, number=5, ) @@ -385,7 +519,7 @@ class ListInstancesResponse(proto.Message): [ListInstances][google.cloud.memcache.v1beta2.CloudMemcache.ListInstances]. Attributes: - resources (Sequence[google.cloud.memcache_v1beta2.types.Instance]): + resources (MutableSequence[google.cloud.memcache_v1beta2.types.Instance]): A list of Memcached instances in the project in the specified location, or across all locations. @@ -396,7 +530,7 @@ class ListInstancesResponse(proto.Message): Token to retrieve the next page of results, or empty if there are no more results in the list. - unreachable (Sequence[str]): + unreachable (MutableSequence[str]): Locations that could not be reached. 
""" @@ -404,16 +538,16 @@ class ListInstancesResponse(proto.Message): def raw_page(self): return self - resources = proto.RepeatedField( + resources: MutableSequence["Instance"] = proto.RepeatedField( proto.MESSAGE, number=1, message="Instance", ) - next_page_token = proto.Field( + next_page_token: str = proto.Field( proto.STRING, number=2, ) - unreachable = proto.RepeatedField( + unreachable: MutableSequence[str] = proto.RepeatedField( proto.STRING, number=3, ) @@ -430,7 +564,7 @@ class GetInstanceRequest(proto.Message): where ``location_id`` refers to a GCP region """ - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) @@ -462,15 +596,15 @@ class CreateInstanceRequest(proto.Message): Required. A Memcached [Instance] resource """ - parent = proto.Field( + parent: str = proto.Field( proto.STRING, number=1, ) - instance_id = proto.Field( + instance_id: str = proto.Field( proto.STRING, number=2, ) - resource = proto.Field( + resource: "Instance" = proto.Field( proto.MESSAGE, number=3, message="Instance", @@ -491,12 +625,12 @@ class UpdateInstanceRequest(proto.Message): specified in update_mask are updated. """ - update_mask = proto.Field( + update_mask: field_mask_pb2.FieldMask = proto.Field( proto.MESSAGE, number=1, message=field_mask_pb2.FieldMask, ) - resource = proto.Field( + resource: "Instance" = proto.Field( proto.MESSAGE, number=2, message="Instance", @@ -514,12 +648,53 @@ class DeleteInstanceRequest(proto.Message): where ``location_id`` refers to a GCP region """ - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) +class RescheduleMaintenanceRequest(proto.Message): + r"""Request for + [RescheduleMaintenance][google.cloud.memcache.v1beta2.CloudMemcache.RescheduleMaintenance]. + + Attributes: + instance (str): + Required. Memcache instance resource name using the form: + ``projects/{project_id}/locations/{location_id}/instances/{instance_id}`` + where ``location_id`` refers to a GCP region. 
+ reschedule_type (google.cloud.memcache_v1beta2.types.RescheduleMaintenanceRequest.RescheduleType): + Required. If reschedule type is SPECIFIC_TIME, must set up + schedule_time as well. + schedule_time (google.protobuf.timestamp_pb2.Timestamp): + Timestamp when the maintenance shall be rescheduled to if + reschedule_type=SPECIFIC_TIME, in RFC 3339 format, for + example ``2012-11-15T16:19:00.094Z``. + """ + + class RescheduleType(proto.Enum): + r"""Reschedule options.""" + RESCHEDULE_TYPE_UNSPECIFIED = 0 + IMMEDIATE = 1 + NEXT_AVAILABLE_WINDOW = 2 + SPECIFIC_TIME = 3 + + instance: str = proto.Field( + proto.STRING, + number=1, + ) + reschedule_type: RescheduleType = proto.Field( + proto.ENUM, + number=2, + enum=RescheduleType, + ) + schedule_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + + class ApplyParametersRequest(proto.Message): r"""Request for [ApplyParameters][google.cloud.memcache.v1beta2.CloudMemcache.ApplyParameters]. @@ -529,7 +704,7 @@ class ApplyParametersRequest(proto.Message): Required. Resource name of the Memcached instance for which parameter group updates should be applied. - node_ids (Sequence[str]): + node_ids (MutableSequence[str]): Nodes to which the instance-level parameter group is applied. apply_all (bool): @@ -539,15 +714,15 @@ class ApplyParametersRequest(proto.Message): within the instance. """ - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) - node_ids = proto.RepeatedField( + node_ids: MutableSequence[str] = proto.RepeatedField( proto.STRING, number=2, ) - apply_all = proto.Field( + apply_all: bool = proto.Field( proto.BOOL, number=3, ) @@ -568,16 +743,16 @@ class UpdateParametersRequest(proto.Message): The parameters to apply to the instance. 
""" - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) - update_mask = proto.Field( + update_mask: field_mask_pb2.FieldMask = proto.Field( proto.MESSAGE, number=2, message=field_mask_pb2.FieldMask, ) - parameters = proto.Field( + parameters: "MemcacheParameters" = proto.Field( proto.MESSAGE, number=3, message="MemcacheParameters", @@ -593,7 +768,7 @@ class ApplySoftwareUpdateRequest(proto.Message): Required. Resource name of the Memcached instance for which software update should be applied. - node_ids (Sequence[str]): + node_ids (MutableSequence[str]): Nodes to which we should apply the update to. Note all the selected nodes are updated in parallel. @@ -605,40 +780,42 @@ class ApplySoftwareUpdateRequest(proto.Message): instance. """ - instance = proto.Field( + instance: str = proto.Field( proto.STRING, number=1, ) - node_ids = proto.RepeatedField( + node_ids: MutableSequence[str] = proto.RepeatedField( proto.STRING, number=2, ) - apply_all = proto.Field( + apply_all: bool = proto.Field( proto.BOOL, number=3, ) class MemcacheParameters(proto.Message): - r"""The unique ID associated with this set of parameters. Users - can use this id to determine if the parameters associated with - the instance differ from the parameters associated with the - nodes. A discrepancy between parameter ids can inform users that - they may need to take action to apply parameters on nodes. + r""" Attributes: id (str): - Output only. - params (Mapping[str, str]): + Output only. The unique ID associated with + this set of parameters. Users can use this id to + determine if the parameters associated with the + instance differ from the parameters associated + with the nodes. A discrepancy between parameter + ids can inform users that they may need to take + action to apply parameters on nodes. + params (MutableMapping[str, str]): User defined set of parameters to use in the memcached process. 
""" - id = proto.Field( + id: str = proto.Field( proto.STRING, number=1, ) - params = proto.MapField( + params: MutableMapping[str, str] = proto.MapField( proto.STRING, proto.STRING, number=3, @@ -675,33 +852,33 @@ class OperationMetadata(proto.Message): operation. """ - create_time = proto.Field( + create_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=1, message=timestamp_pb2.Timestamp, ) - end_time = proto.Field( + end_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=2, message=timestamp_pb2.Timestamp, ) - target = proto.Field( + target: str = proto.Field( proto.STRING, number=3, ) - verb = proto.Field( + verb: str = proto.Field( proto.STRING, number=4, ) - status_detail = proto.Field( + status_detail: str = proto.Field( proto.STRING, number=5, ) - cancel_requested = proto.Field( + cancel_requested: bool = proto.Field( proto.BOOL, number=6, ) - api_version = proto.Field( + api_version: str = proto.Field( proto.STRING, number=7, ) @@ -712,14 +889,14 @@ class LocationMetadata(proto.Message): [google.cloud.location.Location][google.cloud.location.Location]. Attributes: - available_zones (Mapping[str, google.cloud.memcache_v1beta2.types.ZoneMetadata]): + available_zones (MutableMapping[str, google.cloud.memcache_v1beta2.types.ZoneMetadata]): Output only. The set of available zones in the location. The map is keyed by the lowercase ID of each zone, as defined by GCE. These keys can be specified in the ``zones`` field when creating a Memcached instance. 
""" - available_zones = proto.MapField( + available_zones: MutableMapping[str, "ZoneMetadata"] = proto.MapField( proto.STRING, proto.MESSAGE, number=1, diff --git a/noxfile.py b/noxfile.py index ffe9f35..d8440c0 100644 --- a/noxfile.py +++ b/noxfile.py @@ -273,12 +273,16 @@ def cover(session): session.run("coverage", "erase") -@nox.session(python=DEFAULT_PYTHON_VERSION) +@nox.session(python="3.9") def docs(session): """Build the docs for this library.""" session.install("-e", ".") - session.install("sphinx==4.0.1", "alabaster", "recommonmark") + session.install( + "sphinx==4.0.1", + "alabaster", + "recommonmark", + ) shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) session.run( @@ -295,13 +299,16 @@ def docs(session): ) -@nox.session(python=DEFAULT_PYTHON_VERSION) +@nox.session(python="3.9") def docfx(session): """Build the docfx yaml files for this library.""" session.install("-e", ".") session.install( - "sphinx==4.0.1", "alabaster", "recommonmark", "gcp-sphinx-docfx-yaml" + "sphinx==4.0.1", + "alabaster", + "recommonmark", + "gcp-sphinx-docfx-yaml", ) shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) diff --git a/owlbot.py b/owlbot.py new file mode 100644 index 0000000..ce738f0 --- /dev/null +++ b/owlbot.py @@ -0,0 +1,56 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import json +from pathlib import Path +import shutil + +import synthtool as s +import synthtool.gcp as gcp +from synthtool.languages import python + +# ---------------------------------------------------------------------------- +# Copy the generated client from the owl-bot staging directory +# ---------------------------------------------------------------------------- + +clean_up_generated_samples = True + +# Load the default version defined in .repo-metadata.json. +default_version = json.load(open(".repo-metadata.json", "rt")).get( + "default_version" +) + +for library in s.get_staging_dirs(default_version): + if clean_up_generated_samples: + shutil.rmtree("samples/generated_samples", ignore_errors=True) + clean_up_generated_samples = False + s.move([library], excludes=["**/gapic_version.py"]) +s.remove_staging_dirs() + +# ---------------------------------------------------------------------------- +# Add templated files +# ---------------------------------------------------------------------------- + +templated_files = gcp.CommonTemplates().py_library( + cov_level=100, + microgenerator=True, + versions=gcp.common.detect_versions(path="./google", default_first=True), +) +s.move(templated_files, excludes=[".coveragerc", ".github/release-please.yml"]) + +python.py_samples(skip_readmes=True) + +# run format session for all directories which have a noxfile +for noxfile in Path(".").glob("**/noxfile.py"): + s.shell.run(["nox", "-s", "format"], cwd=noxfile.parent, hide_output=False) diff --git a/release-please-config.json b/release-please-config.json new file mode 100644 index 0000000..e7faaa4 --- /dev/null +++ b/release-please-config.json @@ -0,0 +1,30 @@ +{ + "$schema": "https://raw.githubusercontent.com/googleapis/release-please/main/schemas/config.json", + "packages": { + ".": { + "release-type": "python", + "extra-files": [ + "google/cloud/memcache_v1beta2/gapic_version.py", + "google/cloud/memcache_v1/gapic_version.py", + 
"google/cloud/memcache/gapic_version.py", + { + "type": "json", + "path": "samples/generated_samples/snippet_metadata_google.cloud.memcache.v1beta2.json", + "jsonpath": "$.clientLibrary.version" + }, + { + "type": "json", + "path": "samples/generated_samples/snippet_metadata_google.cloud.memcache.v1.json", + "jsonpath": "$.clientLibrary.version" + } + ] + } + }, + "release-type": "python", + "plugins": [ + { + "type": "sentence-case" + } + ], + "initial-version": "0.1.0" +} diff --git a/samples/generated_samples/memcache_v1_generated_cloud_memcache_apply_parameters_async.py b/samples/generated_samples/memcache_v1_generated_cloud_memcache_apply_parameters_async.py index 0bfffad..4d7ed2e 100644 --- a/samples/generated_samples/memcache_v1_generated_cloud_memcache_apply_parameters_async.py +++ b/samples/generated_samples/memcache_v1_generated_cloud_memcache_apply_parameters_async.py @@ -48,7 +48,7 @@ async def sample_apply_parameters(): print("Waiting for operation to complete...") - response = await operation.result() + response = (await operation).result() # Handle the response print(response) diff --git a/samples/generated_samples/memcache_v1_generated_cloud_memcache_create_instance_async.py b/samples/generated_samples/memcache_v1_generated_cloud_memcache_create_instance_async.py index 58b8f08..3e8e97d 100644 --- a/samples/generated_samples/memcache_v1_generated_cloud_memcache_create_instance_async.py +++ b/samples/generated_samples/memcache_v1_generated_cloud_memcache_create_instance_async.py @@ -56,7 +56,7 @@ async def sample_create_instance(): print("Waiting for operation to complete...") - response = await operation.result() + response = (await operation).result() # Handle the response print(response) diff --git a/samples/generated_samples/memcache_v1_generated_cloud_memcache_delete_instance_async.py b/samples/generated_samples/memcache_v1_generated_cloud_memcache_delete_instance_async.py index b6a4f6c..c74323e 100644 --- 
a/samples/generated_samples/memcache_v1_generated_cloud_memcache_delete_instance_async.py +++ b/samples/generated_samples/memcache_v1_generated_cloud_memcache_delete_instance_async.py @@ -48,7 +48,7 @@ async def sample_delete_instance(): print("Waiting for operation to complete...") - response = await operation.result() + response = (await operation).result() # Handle the response print(response) diff --git a/samples/generated_samples/memcache_v1_generated_cloud_memcache_reschedule_maintenance_async.py b/samples/generated_samples/memcache_v1_generated_cloud_memcache_reschedule_maintenance_async.py new file mode 100644 index 0000000..11b8744 --- /dev/null +++ b/samples/generated_samples/memcache_v1_generated_cloud_memcache_reschedule_maintenance_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RescheduleMaintenance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-memcache + + +# [START memcache_v1_generated_CloudMemcache_RescheduleMaintenance_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. 
+# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import memcache_v1 + + +async def sample_reschedule_maintenance(): + # Create a client + client = memcache_v1.CloudMemcacheAsyncClient() + + # Initialize request argument(s) + request = memcache_v1.RescheduleMaintenanceRequest( + instance="instance_value", + reschedule_type="SPECIFIC_TIME", + ) + + # Make the request + operation = client.reschedule_maintenance(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END memcache_v1_generated_CloudMemcache_RescheduleMaintenance_async] diff --git a/samples/generated_samples/memcache_v1_generated_cloud_memcache_reschedule_maintenance_sync.py b/samples/generated_samples/memcache_v1_generated_cloud_memcache_reschedule_maintenance_sync.py new file mode 100644 index 0000000..821c80e --- /dev/null +++ b/samples/generated_samples/memcache_v1_generated_cloud_memcache_reschedule_maintenance_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for RescheduleMaintenance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-memcache + + +# [START memcache_v1_generated_CloudMemcache_RescheduleMaintenance_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import memcache_v1 + + +def sample_reschedule_maintenance(): + # Create a client + client = memcache_v1.CloudMemcacheClient() + + # Initialize request argument(s) + request = memcache_v1.RescheduleMaintenanceRequest( + instance="instance_value", + reschedule_type="SPECIFIC_TIME", + ) + + # Make the request + operation = client.reschedule_maintenance(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END memcache_v1_generated_CloudMemcache_RescheduleMaintenance_sync] diff --git a/samples/generated_samples/memcache_v1_generated_cloud_memcache_update_instance_async.py b/samples/generated_samples/memcache_v1_generated_cloud_memcache_update_instance_async.py index 7db284c..257fc15 100644 --- a/samples/generated_samples/memcache_v1_generated_cloud_memcache_update_instance_async.py +++ b/samples/generated_samples/memcache_v1_generated_cloud_memcache_update_instance_async.py @@ -54,7 +54,7 @@ async def sample_update_instance(): print("Waiting for operation to complete...") - response = await operation.result() + response = (await operation).result() # Handle the response print(response) 
diff --git a/samples/generated_samples/memcache_v1_generated_cloud_memcache_update_parameters_async.py b/samples/generated_samples/memcache_v1_generated_cloud_memcache_update_parameters_async.py index 231298f..94949ec 100644 --- a/samples/generated_samples/memcache_v1_generated_cloud_memcache_update_parameters_async.py +++ b/samples/generated_samples/memcache_v1_generated_cloud_memcache_update_parameters_async.py @@ -48,7 +48,7 @@ async def sample_update_parameters(): print("Waiting for operation to complete...") - response = await operation.result() + response = (await operation).result() # Handle the response print(response) diff --git a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_apply_parameters_async.py b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_apply_parameters_async.py index 3504b1b..e2f69e7 100644 --- a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_apply_parameters_async.py +++ b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_apply_parameters_async.py @@ -48,7 +48,7 @@ async def sample_apply_parameters(): print("Waiting for operation to complete...") - response = await operation.result() + response = (await operation).result() # Handle the response print(response) diff --git a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_apply_software_update_async.py b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_apply_software_update_async.py index 0dc515a..23486a7 100644 --- a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_apply_software_update_async.py +++ b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_apply_software_update_async.py @@ -48,7 +48,7 @@ async def sample_apply_software_update(): print("Waiting for operation to complete...") - response = await operation.result() + response = (await operation).result() # Handle the response print(response) diff --git 
a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_create_instance_async.py b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_create_instance_async.py index ca5e8de..bba1842 100644 --- a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_create_instance_async.py +++ b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_create_instance_async.py @@ -56,7 +56,7 @@ async def sample_create_instance(): print("Waiting for operation to complete...") - response = await operation.result() + response = (await operation).result() # Handle the response print(response) diff --git a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_delete_instance_async.py b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_delete_instance_async.py index 3ee0113..90e9057 100644 --- a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_delete_instance_async.py +++ b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_delete_instance_async.py @@ -48,7 +48,7 @@ async def sample_delete_instance(): print("Waiting for operation to complete...") - response = await operation.result() + response = (await operation).result() # Handle the response print(response) diff --git a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_reschedule_maintenance_async.py b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_reschedule_maintenance_async.py new file mode 100644 index 0000000..c700a67 --- /dev/null +++ b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_reschedule_maintenance_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RescheduleMaintenance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-memcache + + +# [START memcache_v1beta2_generated_CloudMemcache_RescheduleMaintenance_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import memcache_v1beta2 + + +async def sample_reschedule_maintenance(): + # Create a client + client = memcache_v1beta2.CloudMemcacheAsyncClient() + + # Initialize request argument(s) + request = memcache_v1beta2.RescheduleMaintenanceRequest( + instance="instance_value", + reschedule_type="SPECIFIC_TIME", + ) + + # Make the request + operation = client.reschedule_maintenance(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END memcache_v1beta2_generated_CloudMemcache_RescheduleMaintenance_async] diff --git a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_reschedule_maintenance_sync.py b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_reschedule_maintenance_sync.py new file mode 100644 index 0000000..20b2f08 --- /dev/null +++ b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_reschedule_maintenance_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RescheduleMaintenance +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-memcache + + +# [START memcache_v1beta2_generated_CloudMemcache_RescheduleMaintenance_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import memcache_v1beta2 + + +def sample_reschedule_maintenance(): + # Create a client + client = memcache_v1beta2.CloudMemcacheClient() + + # Initialize request argument(s) + request = memcache_v1beta2.RescheduleMaintenanceRequest( + instance="instance_value", + reschedule_type="SPECIFIC_TIME", + ) + + # Make the request + operation = client.reschedule_maintenance(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END memcache_v1beta2_generated_CloudMemcache_RescheduleMaintenance_sync] diff --git a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_update_instance_async.py b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_update_instance_async.py index 80ce316..f401e3a 100644 --- a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_update_instance_async.py +++ b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_update_instance_async.py @@ -54,7 +54,7 @@ async def sample_update_instance(): print("Waiting for operation to complete...") - response = await operation.result() + response = (await operation).result() # Handle the response print(response) diff --git 
a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_update_parameters_async.py b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_update_parameters_async.py index 3e50251..4606935 100644 --- a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_update_parameters_async.py +++ b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_update_parameters_async.py @@ -48,7 +48,7 @@ async def sample_update_parameters(): print("Waiting for operation to complete...") - response = await operation.result() + response = (await operation).result() # Handle the response print(response) diff --git a/samples/generated_samples/snippet_metadata_memcache_v1.json b/samples/generated_samples/snippet_metadata_google.cloud.memcache.v1.json similarity index 86% rename from samples/generated_samples/snippet_metadata_memcache_v1.json rename to samples/generated_samples/snippet_metadata_google.cloud.memcache.v1.json index 52b01b4..b55122b 100644 --- a/samples/generated_samples/snippet_metadata_memcache_v1.json +++ b/samples/generated_samples/snippet_metadata_google.cloud.memcache.v1.json @@ -7,7 +7,8 @@ } ], "language": "PYTHON", - "name": "google-cloud-memcache" + "name": "google-cloud-memcache", + "version": "1.5.0" }, "snippets": [ { @@ -38,7 +39,7 @@ }, { "name": "node_ids", - "type": "Sequence[str]" + "type": "MutableSequence[str]" }, { "name": "apply_all", @@ -126,7 +127,7 @@ }, { "name": "node_ids", - "type": "Sequence[str]" + "type": "MutableSequence[str]" }, { "name": "apply_all", @@ -847,6 +848,183 @@ ], "title": "memcache_v1_generated_cloud_memcache_list_instances_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.memcache_v1.CloudMemcacheAsyncClient", + "shortName": "CloudMemcacheAsyncClient" + }, + "fullName": "google.cloud.memcache_v1.CloudMemcacheAsyncClient.reschedule_maintenance", + "method": { + "fullName": 
"google.cloud.memcache.v1.CloudMemcache.RescheduleMaintenance", + "service": { + "fullName": "google.cloud.memcache.v1.CloudMemcache", + "shortName": "CloudMemcache" + }, + "shortName": "RescheduleMaintenance" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.memcache_v1.types.RescheduleMaintenanceRequest" + }, + { + "name": "instance", + "type": "str" + }, + { + "name": "reschedule_type", + "type": "google.cloud.memcache_v1.types.RescheduleMaintenanceRequest.RescheduleType" + }, + { + "name": "schedule_time", + "type": "google.protobuf.timestamp_pb2.Timestamp" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "reschedule_maintenance" + }, + "description": "Sample for RescheduleMaintenance", + "file": "memcache_v1_generated_cloud_memcache_reschedule_maintenance_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "memcache_v1_generated_CloudMemcache_RescheduleMaintenance_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "memcache_v1_generated_cloud_memcache_reschedule_maintenance_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.memcache_v1.CloudMemcacheClient", + "shortName": "CloudMemcacheClient" + }, + "fullName": "google.cloud.memcache_v1.CloudMemcacheClient.reschedule_maintenance", + "method": { + "fullName": "google.cloud.memcache.v1.CloudMemcache.RescheduleMaintenance", + 
"service": { + "fullName": "google.cloud.memcache.v1.CloudMemcache", + "shortName": "CloudMemcache" + }, + "shortName": "RescheduleMaintenance" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.memcache_v1.types.RescheduleMaintenanceRequest" + }, + { + "name": "instance", + "type": "str" + }, + { + "name": "reschedule_type", + "type": "google.cloud.memcache_v1.types.RescheduleMaintenanceRequest.RescheduleType" + }, + { + "name": "schedule_time", + "type": "google.protobuf.timestamp_pb2.Timestamp" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "reschedule_maintenance" + }, + "description": "Sample for RescheduleMaintenance", + "file": "memcache_v1_generated_cloud_memcache_reschedule_maintenance_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "memcache_v1_generated_CloudMemcache_RescheduleMaintenance_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "memcache_v1_generated_cloud_memcache_reschedule_maintenance_sync.py" + }, { "canonical": true, "clientMethod": { diff --git a/samples/generated_samples/snippet_metadata_memcache_v1beta2.json b/samples/generated_samples/snippet_metadata_google.cloud.memcache.v1beta2.json similarity index 87% rename from samples/generated_samples/snippet_metadata_memcache_v1beta2.json rename to samples/generated_samples/snippet_metadata_google.cloud.memcache.v1beta2.json index e67f9f6..7d1518d 100644 --- 
a/samples/generated_samples/snippet_metadata_memcache_v1beta2.json +++ b/samples/generated_samples/snippet_metadata_google.cloud.memcache.v1beta2.json @@ -7,7 +7,8 @@ } ], "language": "PYTHON", - "name": "google-cloud-memcache" + "name": "google-cloud-memcache", + "version": "1.5.0" }, "snippets": [ { @@ -38,7 +39,7 @@ }, { "name": "node_ids", - "type": "Sequence[str]" + "type": "MutableSequence[str]" }, { "name": "apply_all", @@ -126,7 +127,7 @@ }, { "name": "node_ids", - "type": "Sequence[str]" + "type": "MutableSequence[str]" }, { "name": "apply_all", @@ -215,7 +216,7 @@ }, { "name": "node_ids", - "type": "Sequence[str]" + "type": "MutableSequence[str]" }, { "name": "apply_all", @@ -303,7 +304,7 @@ }, { "name": "node_ids", - "type": "Sequence[str]" + "type": "MutableSequence[str]" }, { "name": "apply_all", @@ -1024,6 +1025,183 @@ ], "title": "memcache_v1beta2_generated_cloud_memcache_list_instances_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.memcache_v1beta2.CloudMemcacheAsyncClient", + "shortName": "CloudMemcacheAsyncClient" + }, + "fullName": "google.cloud.memcache_v1beta2.CloudMemcacheAsyncClient.reschedule_maintenance", + "method": { + "fullName": "google.cloud.memcache.v1beta2.CloudMemcache.RescheduleMaintenance", + "service": { + "fullName": "google.cloud.memcache.v1beta2.CloudMemcache", + "shortName": "CloudMemcache" + }, + "shortName": "RescheduleMaintenance" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.memcache_v1beta2.types.RescheduleMaintenanceRequest" + }, + { + "name": "instance", + "type": "str" + }, + { + "name": "reschedule_type", + "type": "google.cloud.memcache_v1beta2.types.RescheduleMaintenanceRequest.RescheduleType" + }, + { + "name": "schedule_time", + "type": "google.protobuf.timestamp_pb2.Timestamp" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": 
"metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "reschedule_maintenance" + }, + "description": "Sample for RescheduleMaintenance", + "file": "memcache_v1beta2_generated_cloud_memcache_reschedule_maintenance_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "memcache_v1beta2_generated_CloudMemcache_RescheduleMaintenance_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "memcache_v1beta2_generated_cloud_memcache_reschedule_maintenance_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.memcache_v1beta2.CloudMemcacheClient", + "shortName": "CloudMemcacheClient" + }, + "fullName": "google.cloud.memcache_v1beta2.CloudMemcacheClient.reschedule_maintenance", + "method": { + "fullName": "google.cloud.memcache.v1beta2.CloudMemcache.RescheduleMaintenance", + "service": { + "fullName": "google.cloud.memcache.v1beta2.CloudMemcache", + "shortName": "CloudMemcache" + }, + "shortName": "RescheduleMaintenance" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.memcache_v1beta2.types.RescheduleMaintenanceRequest" + }, + { + "name": "instance", + "type": "str" + }, + { + "name": "reschedule_type", + "type": "google.cloud.memcache_v1beta2.types.RescheduleMaintenanceRequest.RescheduleType" + }, + { + "name": "schedule_time", + "type": "google.protobuf.timestamp_pb2.Timestamp" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + 
"type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "reschedule_maintenance" + }, + "description": "Sample for RescheduleMaintenance", + "file": "memcache_v1beta2_generated_cloud_memcache_reschedule_maintenance_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "memcache_v1beta2_generated_CloudMemcache_RescheduleMaintenance_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "memcache_v1beta2_generated_cloud_memcache_reschedule_maintenance_sync.py" + }, { "canonical": true, "clientMethod": { diff --git a/scripts/fixup_memcache_v1_keywords.py b/scripts/fixup_memcache_v1_keywords.py index 84308d4..de61ce0 100644 --- a/scripts/fixup_memcache_v1_keywords.py +++ b/scripts/fixup_memcache_v1_keywords.py @@ -44,6 +44,7 @@ class memcacheCallTransformer(cst.CSTTransformer): 'delete_instance': ('name', ), 'get_instance': ('name', ), 'list_instances': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'reschedule_maintenance': ('instance', 'reschedule_type', 'schedule_time', ), 'update_instance': ('update_mask', 'instance', ), 'update_parameters': ('name', 'update_mask', 'parameters', ), } diff --git a/scripts/fixup_memcache_v1beta2_keywords.py b/scripts/fixup_memcache_v1beta2_keywords.py index 4b5aa32..b082016 100644 --- a/scripts/fixup_memcache_v1beta2_keywords.py +++ b/scripts/fixup_memcache_v1beta2_keywords.py @@ -45,6 +45,7 @@ class memcacheCallTransformer(cst.CSTTransformer): 'delete_instance': ('name', ), 'get_instance': ('name', ), 'list_instances': ('parent', 'page_size', 'page_token', 'filter', 
'order_by', ), + 'reschedule_maintenance': ('instance', 'reschedule_type', 'schedule_time', ), 'update_instance': ('update_mask', 'resource', ), 'update_parameters': ('name', 'update_mask', 'parameters', ), } diff --git a/setup.py b/setup.py index 97ad523..a594894 100644 --- a/setup.py +++ b/setup.py @@ -1,6 +1,5 @@ # -*- coding: utf-8 -*- - -# Copyright (C) 2019 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -14,18 +13,30 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import io import os -import setuptools +import setuptools # type: ignore + +package_root = os.path.abspath(os.path.dirname(__file__)) name = "google-cloud-memcache" -description = "Memorystore for Memcached API client library" -version = "1.4.4" -release_status = "Development Status :: 5 - Production/Stable" + + +description = "Google Cloud Memcache API client library" + +version = {} +with open(os.path.join(package_root, "google/cloud/memcache/gapic_version.py")) as fp: + exec(fp.read(), version) +version = version["__version__"] + +if version[0] == "0": + release_status = "Development Status :: 4 - Beta" +else: + release_status = "Development Status :: 5 - Production/Stable" + dependencies = [ - "google-api-core[grpc] >= 1.32.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*", + "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", "proto-plus >= 1.22.0, <2.0.0dev", "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", ] @@ -47,7 +58,6 @@ if "google.cloud" in packages: namespaces.append("google.cloud") - setuptools.setup( name=name, version=version, diff --git a/testing/constraints-3.10.txt b/testing/constraints-3.10.txt index e69de29..ed7f9ae 100644 --- 
a/testing/constraints-3.10.txt +++ b/testing/constraints-3.10.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/testing/constraints-3.11.txt b/testing/constraints-3.11.txt index e69de29..ed7f9ae 100644 --- a/testing/constraints-3.11.txt +++ b/testing/constraints-3.11.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/testing/constraints-3.7.txt b/testing/constraints-3.7.txt index 4005dc5..6c44adf 100644 --- a/testing/constraints-3.7.txt +++ b/testing/constraints-3.7.txt @@ -4,6 +4,6 @@ # Pin the version to the lower bound. # e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", # Then this file should have google-cloud-foo==1.14.0 -google-api-core==1.32.0 +google-api-core==1.34.0 proto-plus==1.22.0 protobuf==3.19.5 diff --git a/testing/constraints-3.8.txt b/testing/constraints-3.8.txt index da93009..ed7f9ae 100644 --- a/testing/constraints-3.8.txt +++ b/testing/constraints-3.8.txt @@ -1,2 +1,6 @@ -# This constraints file is left inentionally empty -# so the latest version of dependencies is installed \ No newline at end of file +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/testing/constraints-3.9.txt b/testing/constraints-3.9.txt index da93009..ed7f9ae 100644 --- a/testing/constraints-3.9.txt +++ b/testing/constraints-3.9.txt @@ -1,2 +1,6 @@ -# This constraints file is left inentionally empty -# so the latest version of dependencies is installed \ No newline at end of file +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. 
+google-api-core +proto-plus +protobuf diff --git a/tests/unit/gapic/memcache_v1/test_cloud_memcache.py b/tests/unit/gapic/memcache_v1/test_cloud_memcache.py index 0817bd9..6c9e884 100644 --- a/tests/unit/gapic/memcache_v1/test_cloud_memcache.py +++ b/tests/unit/gapic/memcache_v1/test_cloud_memcache.py @@ -39,10 +39,15 @@ import google.auth from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError +from google.cloud.location import locations_pb2 from google.longrunning import operations_pb2 from google.oauth2 import service_account +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore +from google.type import dayofweek_pb2 # type: ignore +from google.type import timeofday_pb2 # type: ignore import grpc from grpc.experimental import aio from proto.marshal.rules import wrappers @@ -2594,6 +2599,267 @@ async def test_apply_parameters_flattened_error_async(): ) +@pytest.mark.parametrize( + "request_type", + [ + cloud_memcache.RescheduleMaintenanceRequest, + dict, + ], +) +def test_reschedule_maintenance(request_type, transport: str = "grpc"): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.reschedule_maintenance), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.reschedule_maintenance(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_memcache.RescheduleMaintenanceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_reschedule_maintenance_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.reschedule_maintenance), "__call__" + ) as call: + client.reschedule_maintenance() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_memcache.RescheduleMaintenanceRequest() + + +@pytest.mark.asyncio +async def test_reschedule_maintenance_async( + transport: str = "grpc_asyncio", + request_type=cloud_memcache.RescheduleMaintenanceRequest, +): + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.reschedule_maintenance), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.reschedule_maintenance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_memcache.RescheduleMaintenanceRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_reschedule_maintenance_async_from_dict(): + await test_reschedule_maintenance_async(request_type=dict) + + +def test_reschedule_maintenance_field_headers(): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_memcache.RescheduleMaintenanceRequest() + + request.instance = "instance_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.reschedule_maintenance), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.reschedule_maintenance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "instance=instance_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_reschedule_maintenance_field_headers_async(): + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_memcache.RescheduleMaintenanceRequest() + + request.instance = "instance_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.reschedule_maintenance), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.reschedule_maintenance(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "instance=instance_value", + ) in kw["metadata"] + + +def test_reschedule_maintenance_flattened(): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.reschedule_maintenance), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.reschedule_maintenance( + instance="instance_value", + reschedule_type=cloud_memcache.RescheduleMaintenanceRequest.RescheduleType.IMMEDIATE, + schedule_time=timestamp_pb2.Timestamp(seconds=751), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].instance + mock_val = "instance_value" + assert arg == mock_val + arg = args[0].reschedule_type + mock_val = cloud_memcache.RescheduleMaintenanceRequest.RescheduleType.IMMEDIATE + assert arg == mock_val + assert TimestampRule().to_proto( + args[0].schedule_time + ) == timestamp_pb2.Timestamp(seconds=751) + + +def test_reschedule_maintenance_flattened_error(): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.reschedule_maintenance( + cloud_memcache.RescheduleMaintenanceRequest(), + instance="instance_value", + reschedule_type=cloud_memcache.RescheduleMaintenanceRequest.RescheduleType.IMMEDIATE, + schedule_time=timestamp_pb2.Timestamp(seconds=751), + ) + + +@pytest.mark.asyncio +async def test_reschedule_maintenance_flattened_async(): + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.reschedule_maintenance), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.reschedule_maintenance( + instance="instance_value", + reschedule_type=cloud_memcache.RescheduleMaintenanceRequest.RescheduleType.IMMEDIATE, + schedule_time=timestamp_pb2.Timestamp(seconds=751), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].instance + mock_val = "instance_value" + assert arg == mock_val + arg = args[0].reschedule_type + mock_val = cloud_memcache.RescheduleMaintenanceRequest.RescheduleType.IMMEDIATE + assert arg == mock_val + assert TimestampRule().to_proto( + args[0].schedule_time + ) == timestamp_pb2.Timestamp(seconds=751) + + +@pytest.mark.asyncio +async def test_reschedule_maintenance_flattened_error_async(): + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.reschedule_maintenance( + cloud_memcache.RescheduleMaintenanceRequest(), + instance="instance_value", + reschedule_type=cloud_memcache.RescheduleMaintenanceRequest.RescheduleType.IMMEDIATE, + schedule_time=timestamp_pb2.Timestamp(seconds=751), + ) + + def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. transport = transports.CloudMemcacheGrpcTransport( @@ -2738,6 +3004,13 @@ def test_cloud_memcache_base_transport(): "update_parameters", "delete_instance", "apply_parameters", + "reschedule_maintenance", + "get_location", + "list_locations", + "get_operation", + "cancel_operation", + "delete_operation", + "list_operations", ) for method in methods: with pytest.raises(NotImplementedError): @@ -3286,6 +3559,860 @@ async def test_transport_close_async(): close.assert_called_once() +def test_delete_operation(transport: str = "grpc"): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert response is None + + +@pytest.mark.asyncio +async def test_delete_operation_async(transport: str = "grpc"): + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_operation_field_headers(): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = None + + client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_operation_field_headers_async(): + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_delete_operation_from_dict(): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_delete_operation_from_dict_async(): + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_cancel_operation(transport: str = "grpc"): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc"): + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_cancel_operation_field_headers(): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_cancel_operation_from_dict(): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_operation(transport: str = "grpc"): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc"): + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_get_operation_field_headers(): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_get_operation_from_dict(): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc"): + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_list_operations_field_headers(): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_operations_from_dict(): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_locations(transport: str = "grpc"): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = locations_pb2.ListLocationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.ListLocationsResponse() + response = client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + + +@pytest.mark.asyncio +async def test_list_locations_async(transport: str = "grpc"): + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.ListLocationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + response = await client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + + +def test_list_locations_field_headers(): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = locations_pb2.ListLocationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + call.return_value = locations_pb2.ListLocationsResponse() + + client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_locations_field_headers_async(): + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.ListLocationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + await client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_locations_from_dict(): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = locations_pb2.ListLocationsResponse() + + response = client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_locations_from_dict_async(): + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + response = await client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_location(transport: str = "grpc"): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.GetLocationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.Location() + response = client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, locations_pb2.Location) + + +@pytest.mark.asyncio +async def test_get_location_async(transport: str = "grpc_asyncio"): + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.GetLocationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.Location() + ) + response = await client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +def test_get_location_field_headers(): + client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials()) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.GetLocationRequest() + request.name = "locations/abc" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + call.return_value = locations_pb2.Location() + + client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations/abc", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_location_field_headers_async(): + client = CloudMemcacheAsyncClient(credentials=ga_credentials.AnonymousCredentials()) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.GetLocationRequest() + request.name = "locations/abc" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.Location() + ) + await client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations/abc", + ) in kw["metadata"] + + +def test_get_location_from_dict(): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.Location() + + response = client.get_location( + request={ + "name": "locations/abc", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_location_from_dict_async(): + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.Location() + ) + response = await client.get_location( + request={ + "name": "locations", + } + ) + call.assert_called() + + def test_transport_close(): transports = { "grpc": "_grpc_channel", diff --git a/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py b/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py index c45ad19..8fe7f1d 100644 --- a/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py +++ b/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py @@ -39,10 +39,15 @@ import google.auth from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError +from google.cloud.location import locations_pb2 from google.longrunning import operations_pb2 from google.oauth2 import service_account +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore +from google.type import dayofweek_pb2 # type: ignore +from google.type import timeofday_pb2 # type: ignore import grpc from grpc.experimental import aio from proto.marshal.rules import wrappers @@ -2859,6 +2864,267 @@ async def test_apply_software_update_flattened_error_async(): ) +@pytest.mark.parametrize( + "request_type", + [ + cloud_memcache.RescheduleMaintenanceRequest, + dict, + ], +) +def test_reschedule_maintenance(request_type, transport: str = "grpc"): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.reschedule_maintenance), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.reschedule_maintenance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_memcache.RescheduleMaintenanceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_reschedule_maintenance_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.reschedule_maintenance), "__call__" + ) as call: + client.reschedule_maintenance() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_memcache.RescheduleMaintenanceRequest() + + +@pytest.mark.asyncio +async def test_reschedule_maintenance_async( + transport: str = "grpc_asyncio", + request_type=cloud_memcache.RescheduleMaintenanceRequest, +): + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.reschedule_maintenance), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.reschedule_maintenance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_memcache.RescheduleMaintenanceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_reschedule_maintenance_async_from_dict(): + await test_reschedule_maintenance_async(request_type=dict) + + +def test_reschedule_maintenance_field_headers(): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_memcache.RescheduleMaintenanceRequest() + + request.instance = "instance_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.reschedule_maintenance), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.reschedule_maintenance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "instance=instance_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_reschedule_maintenance_field_headers_async(): + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = cloud_memcache.RescheduleMaintenanceRequest() + + request.instance = "instance_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.reschedule_maintenance), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.reschedule_maintenance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "instance=instance_value", + ) in kw["metadata"] + + +def test_reschedule_maintenance_flattened(): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.reschedule_maintenance), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.reschedule_maintenance( + instance="instance_value", + reschedule_type=cloud_memcache.RescheduleMaintenanceRequest.RescheduleType.IMMEDIATE, + schedule_time=timestamp_pb2.Timestamp(seconds=751), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].instance + mock_val = "instance_value" + assert arg == mock_val + arg = args[0].reschedule_type + mock_val = cloud_memcache.RescheduleMaintenanceRequest.RescheduleType.IMMEDIATE + assert arg == mock_val + assert TimestampRule().to_proto( + args[0].schedule_time + ) == timestamp_pb2.Timestamp(seconds=751) + + +def test_reschedule_maintenance_flattened_error(): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.reschedule_maintenance( + cloud_memcache.RescheduleMaintenanceRequest(), + instance="instance_value", + reschedule_type=cloud_memcache.RescheduleMaintenanceRequest.RescheduleType.IMMEDIATE, + schedule_time=timestamp_pb2.Timestamp(seconds=751), + ) + + +@pytest.mark.asyncio +async def test_reschedule_maintenance_flattened_async(): + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.reschedule_maintenance), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.reschedule_maintenance( + instance="instance_value", + reschedule_type=cloud_memcache.RescheduleMaintenanceRequest.RescheduleType.IMMEDIATE, + schedule_time=timestamp_pb2.Timestamp(seconds=751), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].instance + mock_val = "instance_value" + assert arg == mock_val + arg = args[0].reschedule_type + mock_val = cloud_memcache.RescheduleMaintenanceRequest.RescheduleType.IMMEDIATE + assert arg == mock_val + assert TimestampRule().to_proto( + args[0].schedule_time + ) == timestamp_pb2.Timestamp(seconds=751) + + +@pytest.mark.asyncio +async def test_reschedule_maintenance_flattened_error_async(): + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.reschedule_maintenance( + cloud_memcache.RescheduleMaintenanceRequest(), + instance="instance_value", + reschedule_type=cloud_memcache.RescheduleMaintenanceRequest.RescheduleType.IMMEDIATE, + schedule_time=timestamp_pb2.Timestamp(seconds=751), + ) + + def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. transport = transports.CloudMemcacheGrpcTransport( @@ -3004,6 +3270,13 @@ def test_cloud_memcache_base_transport(): "delete_instance", "apply_parameters", "apply_software_update", + "reschedule_maintenance", + "get_location", + "list_locations", + "get_operation", + "cancel_operation", + "delete_operation", + "list_operations", ) for method in methods: with pytest.raises(NotImplementedError): @@ -3552,6 +3825,860 @@ async def test_transport_close_async(): close.assert_called_once() +def test_delete_operation(transport: str = "grpc"): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_operation_async(transport: str = "grpc"): + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_operation_field_headers(): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = None + + client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_operation_field_headers_async(): + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_delete_operation_from_dict(): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = None + + response = client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_delete_operation_from_dict_async(): + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_cancel_operation(transport: str = "grpc"): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc"): + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_cancel_operation_field_headers(): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_cancel_operation_from_dict(): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_operation(transport: str = "grpc"): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc"): + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_get_operation_field_headers(): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_get_operation_from_dict(): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, operations_pb2.ListOperationsResponse) + + +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc"): + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_list_operations_field_headers(): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_operations_from_dict(): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_locations(transport: str = "grpc"): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.ListLocationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.ListLocationsResponse() + response = client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + + +@pytest.mark.asyncio +async def test_list_locations_async(transport: str = "grpc"): + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.ListLocationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + response = await client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + + +def test_list_locations_field_headers(): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.ListLocationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + call.return_value = locations_pb2.ListLocationsResponse() + + client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_locations_field_headers_async(): + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.ListLocationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + await client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_locations_from_dict(): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.ListLocationsResponse() + + response = client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_locations_from_dict_async(): + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + response = await client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_location(transport: str = "grpc"): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = locations_pb2.GetLocationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.Location() + response = client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +@pytest.mark.asyncio +async def test_get_location_async(transport: str = "grpc_asyncio"): + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.GetLocationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.Location() + ) + response = await client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +def test_get_location_field_headers(): + client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials()) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+    request = locations_pb2.GetLocationRequest()
+    request.name = "locations/abc"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        call.return_value = locations_pb2.Location()
+
+        client.get_location(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+        # Establish that the field header was sent.
+        _, _, kw = call.mock_calls[0]
+        assert (
+            "x-goog-request-params",
+            "name=locations/abc",
+        ) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_get_location_field_headers_async():
+    client = CloudMemcacheAsyncClient(credentials=ga_credentials.AnonymousCredentials())
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = locations_pb2.GetLocationRequest()
+    request.name = "locations/abc"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            locations_pb2.Location()
+        )
+        await client.get_location(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+        # Establish that the field header was sent.
+        _, _, kw = call.mock_calls[0]
+        assert (
+            "x-goog-request-params",
+            "name=locations/abc",
+        ) in kw["metadata"]
+
+
+def test_get_location_from_dict():
+    client = CloudMemcacheClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = locations_pb2.Location()
+
+        response = client.get_location(
+            request={
+                "name": "locations/abc",
+            }
+        )
+        call.assert_called()
+
+
+@pytest.mark.asyncio
+async def test_get_location_from_dict_async():
+    client = CloudMemcacheAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            locations_pb2.Location()
+        )
+        response = await client.get_location(
+            request={
+                "name": "locations",
+            }
+        )
+        call.assert_called()
+
+
 def test_transport_close():
     transports = {
         "grpc": "_grpc_channel",