From 8d4ab28fbe9dba4ba03195a91dfad4a04dc151b3 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 8 Nov 2022 02:16:16 +0000 Subject: [PATCH 1/7] chore(python): update dependencies in .kokoro/requirements.txt [autoapprove] (#146) Source-Link: https://togithub.com/googleapis/synthtool/commit/e3a1277ac35fc88c09db1930533e24292b132ced Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:452901c74a22f9b9a3bd02bce780b8e8805c97270d424684bff809ce5be8c2a2 --- .github/.OwlBot.lock.yaml | 2 +- .kokoro/requirements.txt | 325 +++++++++++++++++++++----------------- noxfile.py | 11 +- 3 files changed, 187 insertions(+), 151 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 3815c98..12edee7 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,4 +13,4 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:7a40313731a7cb1454eef6b33d3446ebb121836738dc3ab3d2d3ded5268c35b6 + digest: sha256:452901c74a22f9b9a3bd02bce780b8e8805c97270d424684bff809ce5be8c2a2 diff --git a/.kokoro/requirements.txt b/.kokoro/requirements.txt index d15994b..31425f1 100644 --- a/.kokoro/requirements.txt +++ b/.kokoro/requirements.txt @@ -20,9 +20,9 @@ cachetools==5.2.0 \ --hash=sha256:6a94c6402995a99c3970cc7e4884bb60b4a8639938157eeed436098bf9831757 \ --hash=sha256:f9f17d2aec496a9aa6b76f53e3b614c965223c061982d434d160f930c698a9db # via google-auth -certifi==2022.6.15 \ - --hash=sha256:84c85a9078b11105f04f3036a9482ae10e4621616db313fe045dd24743a0820d \ - --hash=sha256:fe86415d55e84719d75f8b69414f6438ac3547d2078ab91b67e779ef69378412 +certifi==2022.9.24 \ + --hash=sha256:0d9c601124e5a6ba9712dbc60d9c53c21e34f5f641fe83002317394311bdce14 \ + --hash=sha256:90c1a32f1d68f940488354e36370f6cca89f0f106db09518524c88d6ed83f382 # via requests cffi==1.15.1 \ --hash=sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5 \ @@ -110,29 +110,33 @@ commonmark==0.9.1 \ --hash=sha256:452f9dc859be7f06631ddcb328b6919c67984aca654e5fefb3914d54691aed60 \ --hash=sha256:da2f38c92590f83de410ba1a3cbceafbc74fee9def35f9251ba9a971d6d66fd9 # via rich -cryptography==37.0.4 \ - --hash=sha256:190f82f3e87033821828f60787cfa42bff98404483577b591429ed99bed39d59 \ - --hash=sha256:2be53f9f5505673eeda5f2736bea736c40f051a739bfae2f92d18aed1eb54596 \ - --hash=sha256:30788e070800fec9bbcf9faa71ea6d8068f5136f60029759fd8c3efec3c9dcb3 \ - --hash=sha256:3d41b965b3380f10e4611dbae366f6dc3cefc7c9ac4e8842a806b9672ae9add5 \ - --hash=sha256:4c590ec31550a724ef893c50f9a97a0c14e9c851c85621c5650d699a7b88f7ab \ - --hash=sha256:549153378611c0cca1042f20fd9c5030d37a72f634c9326e225c9f666d472884 \ - --hash=sha256:63f9c17c0e2474ccbebc9302ce2f07b55b3b3fcb211ded18a42d5764f5c10a82 \ - --hash=sha256:6bc95ed67b6741b2607298f9ea4932ff157e570ef456ef7ff0ef4884a134cc4b \ - --hash=sha256:7099a8d55cd49b737ffc99c17de504f2257e3787e02abe6d1a6d136574873441 \ - --hash=sha256:75976c217f10d48a8b5a8de3d70c454c249e4b91851f6838a4e48b8f41eb71aa \ - --hash=sha256:7bc997818309f56c0038a33b8da5c0bfbb3f1f067f315f9abd6fc07ad359398d \ - --hash=sha256:80f49023dd13ba35f7c34072fa17f604d2f19bf0989f292cedf7ab5770b87a0b \ - --hash=sha256:91ce48d35f4e3d3f1d83e29ef4a9267246e6a3be51864a5b7d2247d5086fa99a \ - --hash=sha256:a958c52505c8adf0d3822703078580d2c0456dd1d27fabfb6f76fe63d2971cd6 \ - --hash=sha256:b62439d7cd1222f3da897e9a9fe53bbf5c104fff4d60893ad1355d4c14a24157 \ - 
--hash=sha256:b7f8dd0d4c1f21759695c05a5ec8536c12f31611541f8904083f3dc582604280 \ - --hash=sha256:d204833f3c8a33bbe11eda63a54b1aad7aa7456ed769a982f21ec599ba5fa282 \ - --hash=sha256:e007f052ed10cc316df59bc90fbb7ff7950d7e2919c9757fd42a2b8ecf8a5f67 \ - --hash=sha256:f2dcb0b3b63afb6df7fd94ec6fbddac81b5492513f7b0436210d390c14d46ee8 \ - --hash=sha256:f721d1885ecae9078c3f6bbe8a88bc0786b6e749bf32ccec1ef2b18929a05046 \ - --hash=sha256:f7a6de3e98771e183645181b3627e2563dcde3ce94a9e42a3f427d2255190327 \ - --hash=sha256:f8c0a6e9e1dd3eb0414ba320f85da6b0dcbd543126e30fcc546e7372a7fbf3b9 +cryptography==38.0.3 \ + --hash=sha256:068147f32fa662c81aebab95c74679b401b12b57494872886eb5c1139250ec5d \ + --hash=sha256:06fc3cc7b6f6cca87bd56ec80a580c88f1da5306f505876a71c8cfa7050257dd \ + --hash=sha256:25c1d1f19729fb09d42e06b4bf9895212292cb27bb50229f5aa64d039ab29146 \ + --hash=sha256:402852a0aea73833d982cabb6d0c3bb582c15483d29fb7085ef2c42bfa7e38d7 \ + --hash=sha256:4e269dcd9b102c5a3d72be3c45d8ce20377b8076a43cbed6f660a1afe365e436 \ + --hash=sha256:5419a127426084933076132d317911e3c6eb77568a1ce23c3ac1e12d111e61e0 \ + --hash=sha256:554bec92ee7d1e9d10ded2f7e92a5d70c1f74ba9524947c0ba0c850c7b011828 \ + --hash=sha256:5e89468fbd2fcd733b5899333bc54d0d06c80e04cd23d8c6f3e0542358c6060b \ + --hash=sha256:65535bc550b70bd6271984d9863a37741352b4aad6fb1b3344a54e6950249b55 \ + --hash=sha256:6ab9516b85bebe7aa83f309bacc5f44a61eeb90d0b4ec125d2d003ce41932d36 \ + --hash=sha256:6addc3b6d593cd980989261dc1cce38263c76954d758c3c94de51f1e010c9a50 \ + --hash=sha256:728f2694fa743a996d7784a6194da430f197d5c58e2f4e278612b359f455e4a2 \ + --hash=sha256:785e4056b5a8b28f05a533fab69febf5004458e20dad7e2e13a3120d8ecec75a \ + --hash=sha256:78cf5eefac2b52c10398a42765bfa981ce2372cbc0457e6bf9658f41ec3c41d8 \ + --hash=sha256:7f836217000342d448e1c9a342e9163149e45d5b5eca76a30e84503a5a96cab0 \ + --hash=sha256:8d41a46251bf0634e21fac50ffd643216ccecfaf3701a063257fe0b2be1b6548 \ + --hash=sha256:984fe150f350a3c91e84de405fe49e688aa6092b3525f407a18b9646f6612320 \ + --hash=sha256:9b24bcff7853ed18a63cfb0c2b008936a9554af24af2fb146e16d8e1aed75748 \ + --hash=sha256:b1b35d9d3a65542ed2e9d90115dfd16bbc027b3f07ee3304fc83580f26e43249 \ + --hash=sha256:b1b52c9e5f8aa2b802d48bd693190341fae201ea51c7a167d69fc48b60e8a959 \ + --hash=sha256:bbf203f1a814007ce24bd4d51362991d5cb90ba0c177a9c08825f2cc304d871f \ + --hash=sha256:be243c7e2bfcf6cc4cb350c0d5cdf15ca6383bbcb2a8ef51d3c9411a9d4386f0 \ + --hash=sha256:bfbe6ee19615b07a98b1d2287d6a6073f734735b49ee45b11324d85efc4d5cbd \ + --hash=sha256:c46837ea467ed1efea562bbeb543994c2d1f6e800785bd5a2c98bc096f5cb220 \ + --hash=sha256:dfb4f4dd568de1b6af9f4cda334adf7d72cf5bc052516e1b2608b683375dd95c \ + --hash=sha256:ed7b00096790213e09eb11c97cc6e2b757f15f3d2f85833cd2d3ec3fe37c1722 # via # gcp-releasetool # secretstorage @@ -148,23 +152,23 @@ filelock==3.8.0 \ --hash=sha256:55447caa666f2198c5b6b13a26d2084d26fa5b115c00d065664b2124680c4edc \ --hash=sha256:617eb4e5eedc82fc5f47b6d61e4d11cb837c56cb4544e39081099fa17ad109d4 # via virtualenv -gcp-docuploader==0.6.3 \ - --hash=sha256:ba8c9d76b3bbac54b0311c503a373b00edc2dc02d6d54ea9507045adb8e870f7 \ - --hash=sha256:c0f5aaa82ce1854a386197e4e359b120ad6d4e57ae2c812fce42219a3288026b +gcp-docuploader==0.6.4 \ + --hash=sha256:01486419e24633af78fd0167db74a2763974765ee8078ca6eb6964d0ebd388af \ + --hash=sha256:70861190c123d907b3b067da896265ead2eeb9263969d6955c9e0bb091b5ccbf # via -r requirements.in -gcp-releasetool==1.8.7 \ - --hash=sha256:3d2a67c9db39322194afb3b427e9cb0476ce8f2a04033695f0aeb63979fc2b37 \ - 
--hash=sha256:5e4d28f66e90780d77f3ecf1e9155852b0c3b13cbccb08ab07e66b2357c8da8d +gcp-releasetool==1.9.1 \ + --hash=sha256:952f4055d5d986b070ae2a71c4410b250000f9cc5a1e26398fcd55a5bbc5a15f \ + --hash=sha256:d0d3c814a97c1a237517e837d8cfa668ced8df4b882452578ecef4a4e79c583b # via -r requirements.in -google-api-core==2.8.2 \ - --hash=sha256:06f7244c640322b508b125903bb5701bebabce8832f85aba9335ec00b3d02edc \ - --hash=sha256:93c6a91ccac79079ac6bbf8b74ee75db970cc899278b97d53bc012f35908cf50 +google-api-core==2.10.2 \ + --hash=sha256:10c06f7739fe57781f87523375e8e1a3a4674bf6392cd6131a3222182b971320 \ + --hash=sha256:34f24bd1d5f72a8c4519773d99ca6bf080a6c4e041b4e9f024fe230191dda62e # via # google-cloud-core # google-cloud-storage -google-auth==2.11.0 \ - --hash=sha256:be62acaae38d0049c21ca90f27a23847245c9f161ff54ede13af2cb6afecbac9 \ - --hash=sha256:ed65ecf9f681832298e29328e1ef0a3676e3732b2e56f41532d45f70a22de0fb +google-auth==2.14.0 \ + --hash=sha256:1ad5b0e6eba5f69645971abb3d2c197537d5914070a8c6d30299dfdb07c5c700 \ + --hash=sha256:cf24817855d874ede2efd071aa22125445f555de1685b739a9782fcf408c2a3d # via # gcp-releasetool # google-api-core @@ -178,72 +182,97 @@ google-cloud-storage==2.5.0 \ --hash=sha256:19a26c66c317ce542cea0830b7e787e8dac2588b6bfa4d3fd3b871ba16305ab0 \ --hash=sha256:382f34b91de2212e3c2e7b40ec079d27ee2e3dbbae99b75b1bcd8c63063ce235 # via gcp-docuploader -google-crc32c==1.3.0 \ - --hash=sha256:04e7c220798a72fd0f08242bc8d7a05986b2a08a0573396187fd32c1dcdd58b3 \ - --hash=sha256:05340b60bf05b574159e9bd940152a47d38af3fb43803ffe71f11d704b7696a6 \ - --hash=sha256:12674a4c3b56b706153a358eaa1018c4137a5a04635b92b4652440d3d7386206 \ - --hash=sha256:127f9cc3ac41b6a859bd9dc4321097b1a4f6aa7fdf71b4f9227b9e3ebffb4422 \ - --hash=sha256:13af315c3a0eec8bb8b8d80b8b128cb3fcd17d7e4edafc39647846345a3f003a \ - --hash=sha256:1926fd8de0acb9d15ee757175ce7242e235482a783cd4ec711cc999fc103c24e \ - --hash=sha256:226f2f9b8e128a6ca6a9af9b9e8384f7b53a801907425c9a292553a3a7218ce0 \ - --hash=sha256:276de6273eb074a35bc598f8efbc00c7869c5cf2e29c90748fccc8c898c244df \ - --hash=sha256:318f73f5484b5671f0c7f5f63741ab020a599504ed81d209b5c7129ee4667407 \ - --hash=sha256:3bbce1be3687bbfebe29abdb7631b83e6b25da3f4e1856a1611eb21854b689ea \ - --hash=sha256:42ae4781333e331a1743445931b08ebdad73e188fd554259e772556fc4937c48 \ - --hash=sha256:58be56ae0529c664cc04a9c76e68bb92b091e0194d6e3c50bea7e0f266f73713 \ - --hash=sha256:5da2c81575cc3ccf05d9830f9e8d3c70954819ca9a63828210498c0774fda1a3 \ - --hash=sha256:6311853aa2bba4064d0c28ca54e7b50c4d48e3de04f6770f6c60ebda1e975267 \ - --hash=sha256:650e2917660e696041ab3dcd7abac160b4121cd9a484c08406f24c5964099829 \ - --hash=sha256:6a4db36f9721fdf391646685ecffa404eb986cbe007a3289499020daf72e88a2 \ - --hash=sha256:779cbf1ce375b96111db98fca913c1f5ec11b1d870e529b1dc7354b2681a8c3a \ - --hash=sha256:7f6fe42536d9dcd3e2ffb9d3053f5d05221ae3bbcefbe472bdf2c71c793e3183 \ - --hash=sha256:891f712ce54e0d631370e1f4997b3f182f3368179198efc30d477c75d1f44942 \ - --hash=sha256:95c68a4b9b7828ba0428f8f7e3109c5d476ca44996ed9a5f8aac6269296e2d59 \ - --hash=sha256:96a8918a78d5d64e07c8ea4ed2bc44354e3f93f46a4866a40e8db934e4c0d74b \ - --hash=sha256:9c3cf890c3c0ecfe1510a452a165431b5831e24160c5fcf2071f0f85ca5a47cd \ - --hash=sha256:9f58099ad7affc0754ae42e6d87443299f15d739b0ce03c76f515153a5cda06c \ - --hash=sha256:a0b9e622c3b2b8d0ce32f77eba617ab0d6768b82836391e4f8f9e2074582bf02 \ - --hash=sha256:a7f9cbea4245ee36190f85fe1814e2d7b1e5f2186381b082f5d59f99b7f11328 \ - --hash=sha256:bab4aebd525218bab4ee615786c4581952eadc16b1ff031813a2fd51f0cc7b08 
\ - --hash=sha256:c124b8c8779bf2d35d9b721e52d4adb41c9bfbde45e6a3f25f0820caa9aba73f \ - --hash=sha256:c9da0a39b53d2fab3e5467329ed50e951eb91386e9d0d5b12daf593973c3b168 \ - --hash=sha256:ca60076c388728d3b6ac3846842474f4250c91efbfe5afa872d3ffd69dd4b318 \ - --hash=sha256:cb6994fff247987c66a8a4e550ef374671c2b82e3c0d2115e689d21e511a652d \ - --hash=sha256:d1c1d6236feab51200272d79b3d3e0f12cf2cbb12b208c835b175a21efdb0a73 \ - --hash=sha256:dd7760a88a8d3d705ff562aa93f8445ead54f58fd482e4f9e2bafb7e177375d4 \ - --hash=sha256:dda4d8a3bb0b50f540f6ff4b6033f3a74e8bf0bd5320b70fab2c03e512a62812 \ - --hash=sha256:e0f1ff55dde0ebcfbef027edc21f71c205845585fffe30d4ec4979416613e9b3 \ - --hash=sha256:e7a539b9be7b9c00f11ef16b55486141bc2cdb0c54762f84e3c6fc091917436d \ - --hash=sha256:eb0b14523758e37802f27b7f8cd973f5f3d33be7613952c0df904b68c4842f0e \ - --hash=sha256:ed447680ff21c14aaceb6a9f99a5f639f583ccfe4ce1a5e1d48eb41c3d6b3217 \ - --hash=sha256:f52a4ad2568314ee713715b1e2d79ab55fab11e8b304fd1462ff5cccf4264b3e \ - --hash=sha256:fbd60c6aaa07c31d7754edbc2334aef50601b7f1ada67a96eb1eb57c7c72378f \ - --hash=sha256:fc28e0db232c62ca0c3600884933178f0825c99be4474cdd645e378a10588125 \ - --hash=sha256:fe31de3002e7b08eb20823b3735b97c86c5926dd0581c7710a680b418a8709d4 \ - --hash=sha256:fec221a051150eeddfdfcff162e6db92c65ecf46cb0f7bb1bf812a1520ec026b \ - --hash=sha256:ff71073ebf0e42258a42a0b34f2c09ec384977e7f6808999102eedd5b49920e3 +google-crc32c==1.5.0 \ + --hash=sha256:024894d9d3cfbc5943f8f230e23950cd4906b2fe004c72e29b209420a1e6b05a \ + --hash=sha256:02c65b9817512edc6a4ae7c7e987fea799d2e0ee40c53ec573a692bee24de876 \ + --hash=sha256:02ebb8bf46c13e36998aeaad1de9b48f4caf545e91d14041270d9dca767b780c \ + --hash=sha256:07eb3c611ce363c51a933bf6bd7f8e3878a51d124acfc89452a75120bc436289 \ + --hash=sha256:1034d91442ead5a95b5aaef90dbfaca8633b0247d1e41621d1e9f9db88c36298 \ + --hash=sha256:116a7c3c616dd14a3de8c64a965828b197e5f2d121fedd2f8c5585c547e87b02 \ + --hash=sha256:19e0a019d2c4dcc5e598cd4a4bc7b008546b0358bd322537c74ad47a5386884f \ + --hash=sha256:1c7abdac90433b09bad6c43a43af253e688c9cfc1c86d332aed13f9a7c7f65e2 \ + --hash=sha256:1e986b206dae4476f41bcec1faa057851f3889503a70e1bdb2378d406223994a \ + --hash=sha256:272d3892a1e1a2dbc39cc5cde96834c236d5327e2122d3aaa19f6614531bb6eb \ + --hash=sha256:278d2ed7c16cfc075c91378c4f47924c0625f5fc84b2d50d921b18b7975bd210 \ + --hash=sha256:2ad40e31093a4af319dadf503b2467ccdc8f67c72e4bcba97f8c10cb078207b5 \ + --hash=sha256:2e920d506ec85eb4ba50cd4228c2bec05642894d4c73c59b3a2fe20346bd00ee \ + --hash=sha256:3359fc442a743e870f4588fcf5dcbc1bf929df1fad8fb9905cd94e5edb02e84c \ + --hash=sha256:37933ec6e693e51a5b07505bd05de57eee12f3e8c32b07da7e73669398e6630a \ + --hash=sha256:398af5e3ba9cf768787eef45c803ff9614cc3e22a5b2f7d7ae116df8b11e3314 \ + --hash=sha256:3b747a674c20a67343cb61d43fdd9207ce5da6a99f629c6e2541aa0e89215bcd \ + --hash=sha256:461665ff58895f508e2866824a47bdee72497b091c730071f2b7575d5762ab65 \ + --hash=sha256:4c6fdd4fccbec90cc8a01fc00773fcd5fa28db683c116ee3cb35cd5da9ef6c37 \ + --hash=sha256:5829b792bf5822fd0a6f6eb34c5f81dd074f01d570ed7f36aa101d6fc7a0a6e4 \ + --hash=sha256:596d1f98fc70232fcb6590c439f43b350cb762fb5d61ce7b0e9db4539654cc13 \ + --hash=sha256:5ae44e10a8e3407dbe138984f21e536583f2bba1be9491239f942c2464ac0894 \ + --hash=sha256:635f5d4dd18758a1fbd1049a8e8d2fee4ffed124462d837d1a02a0e009c3ab31 \ + --hash=sha256:64e52e2b3970bd891309c113b54cf0e4384762c934d5ae56e283f9a0afcd953e \ + --hash=sha256:66741ef4ee08ea0b2cc3c86916ab66b6aef03768525627fd6a1b34968b4e3709 \ + 
--hash=sha256:67b741654b851abafb7bc625b6d1cdd520a379074e64b6a128e3b688c3c04740 \ + --hash=sha256:6ac08d24c1f16bd2bf5eca8eaf8304812f44af5cfe5062006ec676e7e1d50afc \ + --hash=sha256:6f998db4e71b645350b9ac28a2167e6632c239963ca9da411523bb439c5c514d \ + --hash=sha256:72218785ce41b9cfd2fc1d6a017dc1ff7acfc4c17d01053265c41a2c0cc39b8c \ + --hash=sha256:74dea7751d98034887dbd821b7aae3e1d36eda111d6ca36c206c44478035709c \ + --hash=sha256:759ce4851a4bb15ecabae28f4d2e18983c244eddd767f560165563bf9aefbc8d \ + --hash=sha256:77e2fd3057c9d78e225fa0a2160f96b64a824de17840351b26825b0848022906 \ + --hash=sha256:7c074fece789b5034b9b1404a1f8208fc2d4c6ce9decdd16e8220c5a793e6f61 \ + --hash=sha256:7c42c70cd1d362284289c6273adda4c6af8039a8ae12dc451dcd61cdabb8ab57 \ + --hash=sha256:7f57f14606cd1dd0f0de396e1e53824c371e9544a822648cd76c034d209b559c \ + --hash=sha256:83c681c526a3439b5cf94f7420471705bbf96262f49a6fe546a6db5f687a3d4a \ + --hash=sha256:8485b340a6a9e76c62a7dce3c98e5f102c9219f4cfbf896a00cf48caf078d438 \ + --hash=sha256:84e6e8cd997930fc66d5bb4fde61e2b62ba19d62b7abd7a69920406f9ecca946 \ + --hash=sha256:89284716bc6a5a415d4eaa11b1726d2d60a0cd12aadf5439828353662ede9dd7 \ + --hash=sha256:8b87e1a59c38f275c0e3676fc2ab6d59eccecfd460be267ac360cc31f7bcde96 \ + --hash=sha256:8f24ed114432de109aa9fd317278518a5af2d31ac2ea6b952b2f7782b43da091 \ + --hash=sha256:98cb4d057f285bd80d8778ebc4fde6b4d509ac3f331758fb1528b733215443ae \ + --hash=sha256:998679bf62b7fb599d2878aa3ed06b9ce688b8974893e7223c60db155f26bd8d \ + --hash=sha256:9ba053c5f50430a3fcfd36f75aff9caeba0440b2d076afdb79a318d6ca245f88 \ + --hash=sha256:9c99616c853bb585301df6de07ca2cadad344fd1ada6d62bb30aec05219c45d2 \ + --hash=sha256:a1fd716e7a01f8e717490fbe2e431d2905ab8aa598b9b12f8d10abebb36b04dd \ + --hash=sha256:a2355cba1f4ad8b6988a4ca3feed5bff33f6af2d7f134852cf279c2aebfde541 \ + --hash=sha256:b1f8133c9a275df5613a451e73f36c2aea4fe13c5c8997e22cf355ebd7bd0728 \ + --hash=sha256:b8667b48e7a7ef66afba2c81e1094ef526388d35b873966d8a9a447974ed9178 \ + --hash=sha256:ba1eb1843304b1e5537e1fca632fa894d6f6deca8d6389636ee5b4797affb968 \ + --hash=sha256:be82c3c8cfb15b30f36768797a640e800513793d6ae1724aaaafe5bf86f8f346 \ + --hash=sha256:c02ec1c5856179f171e032a31d6f8bf84e5a75c45c33b2e20a3de353b266ebd8 \ + --hash=sha256:c672d99a345849301784604bfeaeba4db0c7aae50b95be04dd651fd2a7310b93 \ + --hash=sha256:c6c777a480337ac14f38564ac88ae82d4cd238bf293f0a22295b66eb89ffced7 \ + --hash=sha256:cae0274952c079886567f3f4f685bcaf5708f0a23a5f5216fdab71f81a6c0273 \ + --hash=sha256:cd67cf24a553339d5062eff51013780a00d6f97a39ca062781d06b3a73b15462 \ + --hash=sha256:d3515f198eaa2f0ed49f8819d5732d70698c3fa37384146079b3799b97667a94 \ + --hash=sha256:d5280312b9af0976231f9e317c20e4a61cd2f9629b7bfea6a693d1878a264ebd \ + --hash=sha256:de06adc872bcd8c2a4e0dc51250e9e65ef2ca91be023b9d13ebd67c2ba552e1e \ + --hash=sha256:e1674e4307fa3024fc897ca774e9c7562c957af85df55efe2988ed9056dc4e57 \ + --hash=sha256:e2096eddb4e7c7bdae4bd69ad364e55e07b8316653234a56552d9c988bd2d61b \ + --hash=sha256:e560628513ed34759456a416bf86b54b2476c59144a9138165c9a1575801d0d9 \ + --hash=sha256:edfedb64740750e1a3b16152620220f51d58ff1b4abceb339ca92e934775c27a \ + --hash=sha256:f13cae8cc389a440def0c8c52057f37359014ccbc9dc1f0827936bcd367c6100 \ + --hash=sha256:f314013e7dcd5cf45ab1945d92e713eec788166262ae8deb2cfacd53def27325 \ + --hash=sha256:f583edb943cf2e09c60441b910d6a20b4d9d626c75a36c8fcac01a6c96c01183 \ + --hash=sha256:fd8536e902db7e365f49e7d9029283403974ccf29b13fc7028b97e2295b33556 \ + 
--hash=sha256:fe70e325aa68fa4b5edf7d1a4b6f691eb04bbccac0ace68e34820d283b5f80d4 # via google-resumable-media -google-resumable-media==2.3.3 \ - --hash=sha256:27c52620bd364d1c8116eaac4ea2afcbfb81ae9139fb3199652fcac1724bfb6c \ - --hash=sha256:5b52774ea7a829a8cdaa8bd2d4c3d4bc660c91b30857ab2668d0eb830f4ea8c5 +google-resumable-media==2.4.0 \ + --hash=sha256:2aa004c16d295c8f6c33b2b4788ba59d366677c0a25ae7382436cb30f776deaa \ + --hash=sha256:8d5518502f92b9ecc84ac46779bd4f09694ecb3ba38a3e7ca737a86d15cbca1f # via google-cloud-storage googleapis-common-protos==1.56.4 \ --hash=sha256:8eb2cbc91b69feaf23e32452a7ae60e791e09967d81d4fcc7fc388182d1bd394 \ --hash=sha256:c25873c47279387cfdcbdafa36149887901d36202cb645a0e4f29686bf6e4417 # via google-api-core -idna==3.3 \ - --hash=sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff \ - --hash=sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d +idna==3.4 \ + --hash=sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4 \ + --hash=sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2 # via requests -importlib-metadata==4.12.0 \ - --hash=sha256:637245b8bab2b6502fcbc752cc4b7a6f6243bb02b31c5c26156ad103d3d45670 \ - --hash=sha256:7401a975809ea1fdc658c3aa4f78cc2195a0e019c5cbc4c06122884e9ae80c23 +importlib-metadata==5.0.0 \ + --hash=sha256:da31db32b304314d044d3c12c79bd59e307889b287ad12ff387b3500835fc2ab \ + --hash=sha256:ddb0e35065e8938f867ed4928d0ae5bf2a53b7773871bfe6bcc7e4fcdc7dea43 # via # -r requirements.in # twine -jaraco-classes==3.2.2 \ - --hash=sha256:6745f113b0b588239ceb49532aa09c3ebb947433ce311ef2f8e3ad64ebb74594 \ - --hash=sha256:e6ef6fd3fcf4579a7a019d87d1e56a883f4e4c35cfe925f86731abc58804e647 +jaraco-classes==3.2.3 \ + --hash=sha256:2353de3288bc6b82120752201c6b1c1a14b058267fa424ed5ce5984e3b922158 \ + --hash=sha256:89559fa5c1d3c34eff6f631ad80bb21f378dbcbb35dd161fd2c6b93f5be2f98a # via keyring jeepney==0.8.0 \ --hash=sha256:5efe48d255973902f6badc3ce55e2aa6c5c3b3bc642059ef3a91247bcfcc5806 \ @@ -255,9 +284,9 @@ jinja2==3.1.2 \ --hash=sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852 \ --hash=sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61 # via gcp-releasetool -keyring==23.9.0 \ - --hash=sha256:4c32a31174faaee48f43a7e2c7e9c3216ec5e95acf22a2bebfb4a1d05056ee44 \ - --hash=sha256:98f060ec95ada2ab910c195a2d4317be6ef87936a766b239c46aa3c7aac4f0db +keyring==23.9.3 \ + --hash=sha256:69732a15cb1433bdfbc3b980a8a36a04878a6cfd7cb99f497b573f31618001c0 \ + --hash=sha256:69b01dd83c42f590250fe7a1f503fc229b14de83857314b1933a3ddbf595c4a5 # via # gcp-releasetool # twine @@ -303,9 +332,9 @@ markupsafe==2.1.1 \ --hash=sha256:f121a1420d4e173a5d96e47e9a0c0dcff965afdf1626d28de1460815f7c4ee7a \ --hash=sha256:fc7b548b17d238737688817ab67deebb30e8073c95749d55538ed473130ec0c7 # via jinja2 -more-itertools==8.14.0 \ - --hash=sha256:1bc4f91ee5b1b31ac7ceacc17c09befe6a40a503907baf9c839c229b5095cfd2 \ - --hash=sha256:c09443cd3d5438b8dafccd867a6bc1cb0894389e90cb53d227456b0b0bccb750 +more-itertools==9.0.0 \ + --hash=sha256:250e83d7e81d0c87ca6bd942e6aeab8cc9daa6096d12c5308f3f92fa5e5c1f41 \ + --hash=sha256:5a6257e40878ef0520b1803990e3e22303a41b5714006c32a3fd8304b26ea1ab # via jaraco-classes nox==2022.8.7 \ --hash=sha256:1b894940551dc5c389f9271d197ca5d655d40bdc6ccf93ed6880e4042760a34b \ @@ -325,34 +354,34 @@ platformdirs==2.5.2 \ --hash=sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788 \ 
--hash=sha256:58c8abb07dcb441e6ee4b11d8df0ac856038f944ab98b7be6b27b2a3c7feef19 # via virtualenv -protobuf==3.20.2 \ - --hash=sha256:03d76b7bd42ac4a6e109742a4edf81ffe26ffd87c5993126d894fe48a120396a \ - --hash=sha256:09e25909c4297d71d97612f04f41cea8fa8510096864f2835ad2f3b3df5a5559 \ - --hash=sha256:18e34a10ae10d458b027d7638a599c964b030c1739ebd035a1dfc0e22baa3bfe \ - --hash=sha256:291fb4307094bf5ccc29f424b42268640e00d5240bf0d9b86bf3079f7576474d \ - --hash=sha256:2c0b040d0b5d5d207936ca2d02f00f765906622c07d3fa19c23a16a8ca71873f \ - --hash=sha256:384164994727f274cc34b8abd41a9e7e0562801361ee77437099ff6dfedd024b \ - --hash=sha256:3cb608e5a0eb61b8e00fe641d9f0282cd0eedb603be372f91f163cbfbca0ded0 \ - --hash=sha256:5d9402bf27d11e37801d1743eada54372f986a372ec9679673bfcc5c60441151 \ - --hash=sha256:712dca319eee507a1e7df3591e639a2b112a2f4a62d40fe7832a16fd19151750 \ - --hash=sha256:7a5037af4e76c975b88c3becdf53922b5ffa3f2cddf657574a4920a3b33b80f3 \ - --hash=sha256:8228e56a865c27163d5d1d1771d94b98194aa6917bcfb6ce139cbfa8e3c27334 \ - --hash=sha256:84a1544252a933ef07bb0b5ef13afe7c36232a774affa673fc3636f7cee1db6c \ - --hash=sha256:84fe5953b18a383fd4495d375fe16e1e55e0a3afe7b4f7b4d01a3a0649fcda9d \ - --hash=sha256:9c673c8bfdf52f903081816b9e0e612186684f4eb4c17eeb729133022d6032e3 \ - --hash=sha256:9f876a69ca55aed879b43c295a328970306e8e80a263ec91cf6e9189243c613b \ - --hash=sha256:a9e5ae5a8e8985c67e8944c23035a0dff2c26b0f5070b2f55b217a1c33bbe8b1 \ - --hash=sha256:b4fdb29c5a7406e3f7ef176b2a7079baa68b5b854f364c21abe327bbeec01cdb \ - --hash=sha256:c184485e0dfba4dfd451c3bd348c2e685d6523543a0f91b9fd4ae90eb09e8422 \ - --hash=sha256:c9cdf251c582c16fd6a9f5e95836c90828d51b0069ad22f463761d27c6c19019 \ - --hash=sha256:e39cf61bb8582bda88cdfebc0db163b774e7e03364bbf9ce1ead13863e81e359 \ - --hash=sha256:e8fbc522303e09036c752a0afcc5c0603e917222d8bedc02813fd73b4b4ed804 \ - --hash=sha256:f34464ab1207114e73bba0794d1257c150a2b89b7a9faf504e00af7c9fd58978 \ - --hash=sha256:f52dabc96ca99ebd2169dadbe018824ebda08a795c7684a0b7d203a290f3adb0 +protobuf==3.20.3 \ + --hash=sha256:03038ac1cfbc41aa21f6afcbcd357281d7521b4157926f30ebecc8d4ea59dcb7 \ + --hash=sha256:28545383d61f55b57cf4df63eebd9827754fd2dc25f80c5253f9184235db242c \ + --hash=sha256:2e3427429c9cffebf259491be0af70189607f365c2f41c7c3764af6f337105f2 \ + --hash=sha256:398a9e0c3eaceb34ec1aee71894ca3299605fa8e761544934378bbc6c97de23b \ + --hash=sha256:44246bab5dd4b7fbd3c0c80b6f16686808fab0e4aca819ade6e8d294a29c7050 \ + --hash=sha256:447d43819997825d4e71bf5769d869b968ce96848b6479397e29fc24c4a5dfe9 \ + --hash=sha256:67a3598f0a2dcbc58d02dd1928544e7d88f764b47d4a286202913f0b2801c2e7 \ + --hash=sha256:74480f79a023f90dc6e18febbf7b8bac7508420f2006fabd512013c0c238f454 \ + --hash=sha256:819559cafa1a373b7096a482b504ae8a857c89593cf3a25af743ac9ecbd23480 \ + --hash=sha256:899dc660cd599d7352d6f10d83c95df430a38b410c1b66b407a6b29265d66469 \ + --hash=sha256:8c0c984a1b8fef4086329ff8dd19ac77576b384079247c770f29cc8ce3afa06c \ + --hash=sha256:9aae4406ea63d825636cc11ffb34ad3379335803216ee3a856787bcf5ccc751e \ + --hash=sha256:a7ca6d488aa8ff7f329d4c545b2dbad8ac31464f1d8b1c87ad1346717731e4db \ + --hash=sha256:b6cc7ba72a8850621bfec987cb72623e703b7fe2b9127a161ce61e61558ad905 \ + --hash=sha256:bf01b5720be110540be4286e791db73f84a2b721072a3711efff6c324cdf074b \ + --hash=sha256:c02ce36ec760252242a33967d51c289fd0e1c0e6e5cc9397e2279177716add86 \ + --hash=sha256:d9e4432ff660d67d775c66ac42a67cf2453c27cb4d738fc22cb53b5d84c135d4 \ + --hash=sha256:daa564862dd0d39c00f8086f88700fdbe8bc717e993a21e90711acfed02f2402 \ + 
--hash=sha256:de78575669dddf6099a8a0f46a27e82a1783c557ccc38ee620ed8cc96d3be7d7 \ + --hash=sha256:e64857f395505ebf3d2569935506ae0dfc4a15cb80dc25261176c784662cdcc4 \ + --hash=sha256:f4bd856d702e5b0d96a00ec6b307b0f51c1982c2bf9c0052cf9019e9a544ba99 \ + --hash=sha256:f4c42102bc82a51108e449cbb32b19b180022941c727bac0cfd50170341f16ee # via # gcp-docuploader # gcp-releasetool # google-api-core + # googleapis-common-protos py==1.11.0 \ --hash=sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719 \ --hash=sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378 @@ -377,9 +406,9 @@ pygments==2.13.0 \ # via # readme-renderer # rich -pyjwt==2.4.0 \ - --hash=sha256:72d1d253f32dbd4f5c88eaf1fdc62f3a19f676ccbadb9dbc5d07e951b2b26daf \ - --hash=sha256:d42908208c699b3b973cbeb01a969ba6a96c821eefb1c5bfe4c390c01d67abba +pyjwt==2.6.0 \ + --hash=sha256:69285c7e31fc44f68a1feb309e948e0df53259d579295e6cfe2b1792329f05fd \ + --hash=sha256:d83c3d892a77bbb74d3e1a2cfa90afaadb60945205d1095d9221f04466f64c14 # via gcp-releasetool pyparsing==3.0.9 \ --hash=sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb \ @@ -392,9 +421,9 @@ python-dateutil==2.8.2 \ --hash=sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86 \ --hash=sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9 # via gcp-releasetool -readme-renderer==37.0 \ - --hash=sha256:07b7ea234e03e58f77cc222e206e6abb8f4c0435becce5104794ee591f9301c5 \ - --hash=sha256:9fa416704703e509eeb900696751c908ddeb2011319d93700d8f18baff887a69 +readme-renderer==37.3 \ + --hash=sha256:cd653186dfc73055656f090f227f5cb22a046d7f71a841dfa305f55c9a513273 \ + --hash=sha256:f67a16caedfa71eef48a31b39708637a6f4664c4394801a7b0d6432d13907343 # via twine requests==2.28.1 \ --hash=sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983 \ @@ -405,17 +434,17 @@ requests==2.28.1 \ # google-cloud-storage # requests-toolbelt # twine -requests-toolbelt==0.9.1 \ - --hash=sha256:380606e1d10dc85c3bd47bf5a6095f815ec007be7a8b69c878507068df059e6f \ - --hash=sha256:968089d4584ad4ad7c171454f0a5c6dac23971e9472521ea3b6d49d610aa6fc0 +requests-toolbelt==0.10.1 \ + --hash=sha256:18565aa58116d9951ac39baa288d3adb5b3ff975c4f25eee78555d89e8f247f7 \ + --hash=sha256:62e09f7ff5ccbda92772a29f394a49c3ad6cb181d568b1337626b2abb628a63d # via twine rfc3986==2.0.0 \ --hash=sha256:50b1502b60e289cb37883f3dfd34532b8873c7de9f49bb546641ce9cbd256ebd \ --hash=sha256:97aacf9dbd4bfd829baad6e6309fa6573aaf1be3f6fa735c8ab05e46cecb261c # via twine -rich==12.5.1 \ - --hash=sha256:2eb4e6894cde1e017976d2975ac210ef515d7548bc595ba20e195fb9628acdeb \ - --hash=sha256:63a5c5ce3673d3d5fbbf23cd87e11ab84b6b451436f1b7f19ec54b6bc36ed7ca +rich==12.6.0 \ + --hash=sha256:a4eb26484f2c82589bd9a17c73d32a010b1e29d89f1604cd9bf3a2097b81bb5e \ + --hash=sha256:ba3a3775974105c221d31141f2c116f4fd65c5ceb0698657a11e9f295ec93fd0 # via twine rsa==4.9 \ --hash=sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7 \ @@ -437,9 +466,9 @@ twine==4.0.1 \ --hash=sha256:42026c18e394eac3e06693ee52010baa5313e4811d5a11050e7d48436cf41b9e \ --hash=sha256:96b1cf12f7ae611a4a40b6ae8e9570215daff0611828f5fe1f37a16255ab24a0 # via -r requirements.in -typing-extensions==4.3.0 \ - --hash=sha256:25642c956049920a5aa49edcdd6ab1e06d7e5d467fc00e0506c44ac86fbfca02 \ - --hash=sha256:e6d2677a32f47fc7eb2795db1dd15c1f34eff616bcaf2cfb5e997f854fa1c4a6 +typing-extensions==4.4.0 \ + --hash=sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa \ + 
--hash=sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e # via -r requirements.in urllib3==1.26.12 \ --hash=sha256:3fa96cf423e6987997fc326ae8df396db2a8b7c667747d47ddd8ecba91f4a74e \ @@ -447,9 +476,9 @@ urllib3==1.26.12 \ # via # requests # twine -virtualenv==20.16.4 \ - --hash=sha256:014f766e4134d0008dcaa1f95bafa0fb0f575795d07cae50b1bee514185d6782 \ - --hash=sha256:035ed57acce4ac35c82c9d8802202b0e71adac011a511ff650cbcf9635006a22 +virtualenv==20.16.6 \ + --hash=sha256:186ca84254abcbde98180fd17092f9628c5fe742273c02724972a1d8a2035108 \ + --hash=sha256:530b850b523c6449406dfba859d6345e48ef19b8439606c5d74d7d3c9e14d76e # via nox webencodings==0.5.1 \ --hash=sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78 \ @@ -459,13 +488,13 @@ wheel==0.37.1 \ --hash=sha256:4bdcd7d840138086126cd09254dc6195fb4fc6f01c050a1d7236f2630db1d22a \ --hash=sha256:e9a504e793efbca1b8e0e9cb979a249cf4a0a7b5b8c9e8b65a5e39d49529c1c4 # via -r requirements.in -zipp==3.8.1 \ - --hash=sha256:05b45f1ee8f807d0cc928485ca40a07cb491cf092ff587c0df9cb1fd154848d2 \ - --hash=sha256:47c40d7fe183a6f21403a199b3e4192cca5774656965b0a4988ad2f8feb5f009 +zipp==3.10.0 \ + --hash=sha256:4fcb6f278987a6605757302a6e40e896257570d11c51628968ccb2a47e80c6c1 \ + --hash=sha256:7a7262fd930bd3e36c50b9a64897aec3fafff3dfdeec9623ae22b40e93f99bb8 # via importlib-metadata # The following packages are considered to be unsafe in a requirements file: -setuptools==65.2.0 \ - --hash=sha256:7f4bc85450898a09f76ebf28b72fa25bc7111f6c7d665d514a60bba9c75ef2a9 \ - --hash=sha256:a3ca5857c89f82f5c9410e8508cb32f4872a3bafd4aa7ae122a24ca33bccc750 +setuptools==65.5.0 \ + --hash=sha256:512e5536220e38146176efb833d4a62aa726b7bbff82cfbc8ba9eaa3996e0b17 \ + --hash=sha256:f62ea9da9ed6289bfe868cd6845968a2c854d1427f8548d52cae02a42b4f0356 # via -r requirements.in diff --git a/noxfile.py b/noxfile.py index ffe9f35..29d0200 100644 --- a/noxfile.py +++ b/noxfile.py @@ -278,7 +278,11 @@ def docs(session): """Build the docs for this library.""" session.install("-e", ".") - session.install("sphinx==4.0.1", "alabaster", "recommonmark") + session.install( + "sphinx==4.0.1", + "alabaster", + "recommonmark", + ) shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) session.run( @@ -301,7 +305,10 @@ def docfx(session): session.install("-e", ".") session.install( - "sphinx==4.0.1", "alabaster", "recommonmark", "gcp-sphinx-docfx-yaml" + "sphinx==4.0.1", + "alabaster", + "recommonmark", + "gcp-sphinx-docfx-yaml", ) shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) From f5dd4cc607e663729b7760cb68e481656d309a83 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 16 Nov 2022 17:12:41 +0000 Subject: [PATCH 2/7] chore(python): update release script dependencies [autoapprove] (#148) Source-Link: https://togithub.com/googleapis/synthtool/commit/25083af347468dd5f90f69627420f7d452b6c50e Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:e6cbd61f1838d9ff6a31436dfc13717f372a7482a82fc1863ca954ec47bff8c8 --- .github/.OwlBot.lock.yaml | 2 +- .github/workflows/docs.yml | 4 +-- .github/workflows/lint.yml | 2 +- .github/workflows/unittest.yml | 2 +- .kokoro/docker/docs/Dockerfile | 12 +++---- .kokoro/requirements.in | 4 ++- .kokoro/requirements.txt | 61 ++++++++++++++++++---------------- noxfile.py | 4 +-- 8 files changed, 48 insertions(+), 43 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 12edee7..3f1ccc0 100644 --- 
a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,4 +13,4 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:452901c74a22f9b9a3bd02bce780b8e8805c97270d424684bff809ce5be8c2a2 + digest: sha256:e6cbd61f1838d9ff6a31436dfc13717f372a7482a82fc1863ca954ec47bff8c8 diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index 7092a13..e97d89e 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -12,7 +12,7 @@ jobs: - name: Setup Python uses: actions/setup-python@v4 with: - python-version: "3.10" + python-version: "3.9" - name: Install nox run: | python -m pip install --upgrade setuptools pip wheel @@ -28,7 +28,7 @@ jobs: - name: Setup Python uses: actions/setup-python@v4 with: - python-version: "3.10" + python-version: "3.9" - name: Install nox run: | python -m pip install --upgrade setuptools pip wheel diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index d2aee5b..16d5a9e 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -12,7 +12,7 @@ jobs: - name: Setup Python uses: actions/setup-python@v4 with: - python-version: "3.10" + python-version: "3.8" - name: Install nox run: | python -m pip install --upgrade setuptools pip wheel diff --git a/.github/workflows/unittest.yml b/.github/workflows/unittest.yml index 87ade4d..23000c0 100644 --- a/.github/workflows/unittest.yml +++ b/.github/workflows/unittest.yml @@ -41,7 +41,7 @@ jobs: - name: Setup Python uses: actions/setup-python@v4 with: - python-version: "3.10" + python-version: "3.8" - name: Install coverage run: | python -m pip install --upgrade setuptools pip wheel diff --git a/.kokoro/docker/docs/Dockerfile b/.kokoro/docker/docs/Dockerfile index 238b87b..f8137d0 100644 --- a/.kokoro/docker/docs/Dockerfile +++ b/.kokoro/docker/docs/Dockerfile @@ -60,16 +60,16 @@ RUN apt-get update \ && rm -rf /var/lib/apt/lists/* \ && rm -f /var/cache/apt/archives/*.deb -###################### Install python 3.8.11 +###################### Install python 3.9.13 -# Download python 3.8.11 -RUN wget https://www.python.org/ftp/python/3.8.11/Python-3.8.11.tgz +# Download python 3.9.13 +RUN wget https://www.python.org/ftp/python/3.9.13/Python-3.9.13.tgz # Extract files -RUN tar -xvf Python-3.8.11.tgz +RUN tar -xvf Python-3.9.13.tgz -# Install python 3.8.11 -RUN ./Python-3.8.11/configure --enable-optimizations +# Install python 3.9.13 +RUN ./Python-3.9.13/configure --enable-optimizations RUN make altinstall ###################### Install pip diff --git a/.kokoro/requirements.in b/.kokoro/requirements.in index 7718391..cbd7e77 100644 --- a/.kokoro/requirements.in +++ b/.kokoro/requirements.in @@ -5,4 +5,6 @@ typing-extensions twine wheel setuptools -nox \ No newline at end of file +nox +charset-normalizer<3 +click<8.1.0 diff --git a/.kokoro/requirements.txt b/.kokoro/requirements.txt index 31425f1..9c1b9be 100644 --- a/.kokoro/requirements.txt +++ b/.kokoro/requirements.txt @@ -93,11 +93,14 @@ cffi==1.15.1 \ charset-normalizer==2.1.1 \ --hash=sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845 \ --hash=sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f - # via requests + # via + # -r requirements.in + # requests click==8.0.4 \ --hash=sha256:6a7a62563bbfabfda3a38f3023a1db4a35978c0abd76f6c9605ecd6554d6d9b1 \ --hash=sha256:8458d7b1287c5fb128c90e23381cf99dcde74beaf6c7ff6384ce84d6fe090adb # via + # -r requirements.in # gcp-docuploader # gcp-releasetool colorlog==6.7.0 \ 
@@ -156,9 +159,9 @@ gcp-docuploader==0.6.4 \ --hash=sha256:01486419e24633af78fd0167db74a2763974765ee8078ca6eb6964d0ebd388af \ --hash=sha256:70861190c123d907b3b067da896265ead2eeb9263969d6955c9e0bb091b5ccbf # via -r requirements.in -gcp-releasetool==1.9.1 \ - --hash=sha256:952f4055d5d986b070ae2a71c4410b250000f9cc5a1e26398fcd55a5bbc5a15f \ - --hash=sha256:d0d3c814a97c1a237517e837d8cfa668ced8df4b882452578ecef4a4e79c583b +gcp-releasetool==1.10.0 \ + --hash=sha256:72a38ca91b59c24f7e699e9227c90cbe4dd71b789383cb0164b088abae294c83 \ + --hash=sha256:8c7c99320208383d4bb2b808c6880eb7a81424afe7cdba3c8d84b25f4f0e097d # via -r requirements.in google-api-core==2.10.2 \ --hash=sha256:10c06f7739fe57781f87523375e8e1a3a4674bf6392cd6131a3222182b971320 \ @@ -166,9 +169,9 @@ google-api-core==2.10.2 \ # via # google-cloud-core # google-cloud-storage -google-auth==2.14.0 \ - --hash=sha256:1ad5b0e6eba5f69645971abb3d2c197537d5914070a8c6d30299dfdb07c5c700 \ - --hash=sha256:cf24817855d874ede2efd071aa22125445f555de1685b739a9782fcf408c2a3d +google-auth==2.14.1 \ + --hash=sha256:ccaa901f31ad5cbb562615eb8b664b3dd0bf5404a67618e642307f00613eda4d \ + --hash=sha256:f5d8701633bebc12e0deea4df8abd8aff31c28b355360597f7f2ee60f2e4d016 # via # gcp-releasetool # google-api-core @@ -178,9 +181,9 @@ google-cloud-core==2.3.2 \ --hash=sha256:8417acf6466be2fa85123441696c4badda48db314c607cf1e5d543fa8bdc22fe \ --hash=sha256:b9529ee7047fd8d4bf4a2182de619154240df17fbe60ead399078c1ae152af9a # via google-cloud-storage -google-cloud-storage==2.5.0 \ - --hash=sha256:19a26c66c317ce542cea0830b7e787e8dac2588b6bfa4d3fd3b871ba16305ab0 \ - --hash=sha256:382f34b91de2212e3c2e7b40ec079d27ee2e3dbbae99b75b1bcd8c63063ce235 +google-cloud-storage==2.6.0 \ + --hash=sha256:104ca28ae61243b637f2f01455cc8a05e8f15a2a18ced96cb587241cdd3820f5 \ + --hash=sha256:4ad0415ff61abdd8bb2ae81c1f8f7ec7d91a1011613f2db87c614c550f97bfe9 # via gcp-docuploader google-crc32c==1.5.0 \ --hash=sha256:024894d9d3cfbc5943f8f230e23950cd4906b2fe004c72e29b209420a1e6b05a \ @@ -256,9 +259,9 @@ google-resumable-media==2.4.0 \ --hash=sha256:2aa004c16d295c8f6c33b2b4788ba59d366677c0a25ae7382436cb30f776deaa \ --hash=sha256:8d5518502f92b9ecc84ac46779bd4f09694ecb3ba38a3e7ca737a86d15cbca1f # via google-cloud-storage -googleapis-common-protos==1.56.4 \ - --hash=sha256:8eb2cbc91b69feaf23e32452a7ae60e791e09967d81d4fcc7fc388182d1bd394 \ - --hash=sha256:c25873c47279387cfdcbdafa36149887901d36202cb645a0e4f29686bf6e4417 +googleapis-common-protos==1.57.0 \ + --hash=sha256:27a849d6205838fb6cc3c1c21cb9800707a661bb21c6ce7fb13e99eb1f8a0c46 \ + --hash=sha256:a9f4a1d7f6d9809657b7f1316a1aa527f6664891531bcfcc13b6696e685f443c # via google-api-core idna==3.4 \ --hash=sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4 \ @@ -269,6 +272,7 @@ importlib-metadata==5.0.0 \ --hash=sha256:ddb0e35065e8938f867ed4928d0ae5bf2a53b7773871bfe6bcc7e4fcdc7dea43 # via # -r requirements.in + # keyring # twine jaraco-classes==3.2.3 \ --hash=sha256:2353de3288bc6b82120752201c6b1c1a14b058267fa424ed5ce5984e3b922158 \ @@ -284,9 +288,9 @@ jinja2==3.1.2 \ --hash=sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852 \ --hash=sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61 # via gcp-releasetool -keyring==23.9.3 \ - --hash=sha256:69732a15cb1433bdfbc3b980a8a36a04878a6cfd7cb99f497b573f31618001c0 \ - --hash=sha256:69b01dd83c42f590250fe7a1f503fc229b14de83857314b1933a3ddbf595c4a5 +keyring==23.11.0 \ + --hash=sha256:3dd30011d555f1345dec2c262f0153f2f0ca6bca041fb1dc4588349bb4c0ac1e \ + 
--hash=sha256:ad192263e2cdd5f12875dedc2da13534359a7e760e77f8d04b50968a821c2361 # via # gcp-releasetool # twine @@ -350,9 +354,9 @@ pkginfo==1.8.3 \ --hash=sha256:848865108ec99d4901b2f7e84058b6e7660aae8ae10164e015a6dcf5b242a594 \ --hash=sha256:a84da4318dd86f870a9447a8c98340aa06216bfc6f2b7bdc4b8766984ae1867c # via twine -platformdirs==2.5.2 \ - --hash=sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788 \ - --hash=sha256:58c8abb07dcb441e6ee4b11d8df0ac856038f944ab98b7be6b27b2a3c7feef19 +platformdirs==2.5.4 \ + --hash=sha256:1006647646d80f16130f052404c6b901e80ee4ed6bef6792e1f238a8969106f7 \ + --hash=sha256:af0276409f9a02373d540bf8480021a048711d572745aef4b7842dad245eba10 # via virtualenv protobuf==3.20.3 \ --hash=sha256:03038ac1cfbc41aa21f6afcbcd357281d7521b4157926f30ebecc8d4ea59dcb7 \ @@ -381,7 +385,6 @@ protobuf==3.20.3 \ # gcp-docuploader # gcp-releasetool # google-api-core - # googleapis-common-protos py==1.11.0 \ --hash=sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719 \ --hash=sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378 @@ -476,17 +479,17 @@ urllib3==1.26.12 \ # via # requests # twine -virtualenv==20.16.6 \ - --hash=sha256:186ca84254abcbde98180fd17092f9628c5fe742273c02724972a1d8a2035108 \ - --hash=sha256:530b850b523c6449406dfba859d6345e48ef19b8439606c5d74d7d3c9e14d76e +virtualenv==20.16.7 \ + --hash=sha256:8691e3ff9387f743e00f6bb20f70121f5e4f596cae754531f2b3b3a1b1ac696e \ + --hash=sha256:efd66b00386fdb7dbe4822d172303f40cd05e50e01740b19ea42425cbe653e29 # via nox webencodings==0.5.1 \ --hash=sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78 \ --hash=sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923 # via bleach -wheel==0.37.1 \ - --hash=sha256:4bdcd7d840138086126cd09254dc6195fb4fc6f01c050a1d7236f2630db1d22a \ - --hash=sha256:e9a504e793efbca1b8e0e9cb979a249cf4a0a7b5b8c9e8b65a5e39d49529c1c4 +wheel==0.38.4 \ + --hash=sha256:965f5259b566725405b05e7cf774052044b1ed30119b5d586b2703aafe8719ac \ + --hash=sha256:b60533f3f5d530e971d6737ca6d58681ee434818fab630c83a734bb10c083ce8 # via -r requirements.in zipp==3.10.0 \ --hash=sha256:4fcb6f278987a6605757302a6e40e896257570d11c51628968ccb2a47e80c6c1 \ @@ -494,7 +497,7 @@ zipp==3.10.0 \ # via importlib-metadata # The following packages are considered to be unsafe in a requirements file: -setuptools==65.5.0 \ - --hash=sha256:512e5536220e38146176efb833d4a62aa726b7bbff82cfbc8ba9eaa3996e0b17 \ - --hash=sha256:f62ea9da9ed6289bfe868cd6845968a2c854d1427f8548d52cae02a42b4f0356 +setuptools==65.5.1 \ + --hash=sha256:d0b9a8433464d5800cbe05094acf5c6d52a91bfac9b52bcfc4d41382be5d5d31 \ + --hash=sha256:e197a19aa8ec9722928f2206f8de752def0e4c9fc6953527360d1c36d94ddb2f # via -r requirements.in diff --git a/noxfile.py b/noxfile.py index 29d0200..d8440c0 100644 --- a/noxfile.py +++ b/noxfile.py @@ -273,7 +273,7 @@ def cover(session): session.run("coverage", "erase") -@nox.session(python=DEFAULT_PYTHON_VERSION) +@nox.session(python="3.9") def docs(session): """Build the docs for this library.""" @@ -299,7 +299,7 @@ def docs(session): ) -@nox.session(python=DEFAULT_PYTHON_VERSION) +@nox.session(python="3.9") def docfx(session): """Build the docfx yaml files for this library.""" From 8edf5948c6a59e5172c042faf5c40d98066b52a0 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 25 Nov 2022 11:52:23 -0500 Subject: [PATCH 3/7] chore: Update gapic-generator-python to v1.6.1 (#145) MIME-Version: 1.0 Content-Type: 
text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: update to gapic-generator-python 1.5.0 feat: add support for `google.cloud..__version__` PiperOrigin-RevId: 484665853 Source-Link: https://github.com/googleapis/googleapis/commit/8eb249a19db926c2fbc4ecf1dc09c0e521a88b22 Source-Link: https://github.com/googleapis/googleapis-gen/commit/c8aa327b5f478865fc3fd91e3c2768e54e26ad44 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiYzhhYTMyN2I1ZjQ3ODg2NWZjM2ZkOTFlM2MyNzY4ZTU0ZTI2YWQ0NCJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * update version in gapic_version.py * add .release-please-manifest.json with correct version * add owlbot.py to exclude generated gapic_version.py * set manifest to true in .github/release-please.yml * add release-please-config.json * chore: Update to gapic-generator-python 1.6.0 feat(python): Add typing to proto.Message based class attributes feat(python): Snippetgen handling of repeated enum field PiperOrigin-RevId: 487326846 Source-Link: https://github.com/googleapis/googleapis/commit/da380c77bb87ba0f752baf07605dd1db30e1f7e1 Source-Link: https://github.com/googleapis/googleapis-gen/commit/61ef5762ee6731a0cbbfea22fd0eecee51ab1c8e Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNjFlZjU3NjJlZTY3MzFhMGNiYmZlYTIyZmQwZWVjZWU1MWFiMWM4ZSJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * feat: new APIs added to reflect updates to the filestore service - Add ENTERPRISE Tier - Add snapshot APIs: RevertInstance, ListSnapshots, CreateSnapshot, DeleteSnapshot, UpdateSnapshot - Add multi-share APIs: ListShares, GetShare, CreateShare, DeleteShare, UpdateShare - Add ConnectMode to NetworkConfig (for Private Service Access support) - New status codes (SUSPENDED/SUSPENDING, REVERTING/RESUMING) - Add SuspensionReason (for KMS related suspension) - Add new fields to Instance information: max_capacity_gb, capacity_step_size_gb, max_share_count, capacity_gb, multi_share_enabled PiperOrigin-RevId: 487492758 Source-Link: https://github.com/googleapis/googleapis/commit/5be5981f50322cf0c7388595e0f31ac5d0693469 Source-Link: https://github.com/googleapis/googleapis-gen/commit/ab0e217f560cc2c1afc11441c2eab6b6950efd2b Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiYWIwZTIxN2Y1NjBjYzJjMWFmYzExNDQxYzJlYWI2YjY5NTBlZmQyYiJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * update path to snippet metadata json * chore: Update gapic-generator-python to v1.6.1 PiperOrigin-RevId: 488036204 Source-Link: https://github.com/googleapis/googleapis/commit/08f275f5c1c0d99056e1cb68376323414459ee19 Source-Link: https://github.com/googleapis/googleapis-gen/commit/555c0945e60649e38739ae64bc45719cdf72178f Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNTU1YzA5NDVlNjA2NDllMzg3MzlhZTY0YmM0NTcxOWNkZjcyMTc4ZiJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .github/release-please.yml | 1 + .release-please-manifest.json | 3 + docs/service_v1/types.rst | 1 - docs/service_v1beta1/types.rst | 1 - .../orchestration/airflow/service/__init__.py | 4 + .../airflow/service/gapic_version.py | 16 ++ .../airflow/service_v1/__init__.py | 4 + .../services/environments/async_client.py | 62 +++--- 
.../services/environments/client.py | 58 +++-- .../services/environments/transports/base.py | 2 +- .../services/environments/transports/grpc.py | 20 +- .../environments/transports/grpc_asyncio.py | 16 +- .../services/image_versions/async_client.py | 24 +- .../services/image_versions/client.py | 28 ++- .../image_versions/transports/base.py | 2 +- .../image_versions/transports/grpc.py | 20 +- .../image_versions/transports/grpc_asyncio.py | 16 +- .../airflow/service_v1/types/environments.py | 156 ++++++------- .../service_v1/types/image_versions.py | 30 +-- .../airflow/service_v1/types/operations.py | 14 +- .../airflow/service_v1beta1/__init__.py | 4 + .../services/environments/async_client.py | 74 +++--- .../services/environments/client.py | 66 +++--- .../services/environments/transports/base.py | 2 +- .../services/environments/transports/grpc.py | 20 +- .../environments/transports/grpc_asyncio.py | 16 +- .../services/image_versions/async_client.py | 24 +- .../services/image_versions/client.py | 28 ++- .../image_versions/transports/base.py | 2 +- .../image_versions/transports/grpc.py | 20 +- .../image_versions/transports/grpc_asyncio.py | 16 +- .../service_v1beta1/types/environments.py | 210 +++++++++--------- .../service_v1beta1/types/image_versions.py | 30 +-- .../service_v1beta1/types/operations.py | 14 +- owlbot.py | 56 +++++ release-please-config.json | 28 +++ ...oud.orchestration.airflow.service.v1.json} | 3 +- ...rchestration.airflow.service.v1beta1.json} | 3 +- setup.py | 37 ++- testing/constraints-3.10.txt | 6 + testing/constraints-3.11.txt | 6 + testing/constraints-3.7.txt | 2 +- testing/constraints-3.8.txt | 6 + testing/constraints-3.9.txt | 6 + .../gapic/service_v1/test_environments.py | 1 + .../service_v1beta1/test_environments.py | 1 + 46 files changed, 708 insertions(+), 451 deletions(-) create mode 100644 .release-please-manifest.json create mode 100644 google/cloud/orchestration/airflow/service/gapic_version.py create mode 100644 owlbot.py create mode 100644 release-please-config.json rename samples/generated_samples/{snippet_metadata_service_v1.json => snippet_metadata_google.cloud.orchestration.airflow.service.v1.json} (99%) rename samples/generated_samples/{snippet_metadata_service_v1beta1.json => snippet_metadata_google.cloud.orchestration.airflow.service.v1beta1.json} (99%) diff --git a/.github/release-please.yml b/.github/release-please.yml index 6def37a..e9a4f00 100644 --- a/.github/release-please.yml +++ b/.github/release-please.yml @@ -1,5 +1,6 @@ releaseType: python handleGHRelease: true +manifest: true # NOTE: this section is generated by synthtool.languages.python # See https://github.com/googleapis/synthtool/blob/master/synthtool/languages/python.py branches: diff --git a/.release-please-manifest.json b/.release-please-manifest.json new file mode 100644 index 0000000..50f0c45 --- /dev/null +++ b/.release-please-manifest.json @@ -0,0 +1,3 @@ +{ + ".": "1.4.4" +} diff --git a/docs/service_v1/types.rst b/docs/service_v1/types.rst index 98c0eb9..b49b771 100644 --- a/docs/service_v1/types.rst +++ b/docs/service_v1/types.rst @@ -3,5 +3,4 @@ Types for Google Cloud Orchestration Airflow Service v1 API .. 
automodule:: google.cloud.orchestration.airflow.service_v1.types :members: - :undoc-members: :show-inheritance: diff --git a/docs/service_v1beta1/types.rst b/docs/service_v1beta1/types.rst index 28e626b..9caa208 100644 --- a/docs/service_v1beta1/types.rst +++ b/docs/service_v1beta1/types.rst @@ -3,5 +3,4 @@ Types for Google Cloud Orchestration Airflow Service v1beta1 API .. automodule:: google.cloud.orchestration.airflow.service_v1beta1.types :members: - :undoc-members: :show-inheritance: diff --git a/google/cloud/orchestration/airflow/service/__init__.py b/google/cloud/orchestration/airflow/service/__init__.py index af1bee9..1d4216c 100644 --- a/google/cloud/orchestration/airflow/service/__init__.py +++ b/google/cloud/orchestration/airflow/service/__init__.py @@ -13,6 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from google.cloud.orchestration.airflow.service import gapic_version as package_version + +__version__ = package_version.__version__ + from google.cloud.orchestration.airflow.service_v1.services.environments.async_client import ( EnvironmentsAsyncClient, diff --git a/google/cloud/orchestration/airflow/service/gapic_version.py b/google/cloud/orchestration/airflow/service/gapic_version.py new file mode 100644 index 0000000..25e4dd6 --- /dev/null +++ b/google/cloud/orchestration/airflow/service/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "1.4.4" # {x-release-please-version} diff --git a/google/cloud/orchestration/airflow/service_v1/__init__.py b/google/cloud/orchestration/airflow/service_v1/__init__.py index 88f47ae..c8b3ab7 100644 --- a/google/cloud/orchestration/airflow/service_v1/__init__.py +++ b/google/cloud/orchestration/airflow/service_v1/__init__.py @@ -13,6 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +from google.cloud.orchestration.airflow.service import gapic_version as package_version + +__version__ = package_version.__version__ + from .services.environments import EnvironmentsAsyncClient, EnvironmentsClient from .services.image_versions import ImageVersionsAsyncClient, ImageVersionsClient diff --git a/google/cloud/orchestration/airflow/service_v1/services/environments/async_client.py b/google/cloud/orchestration/airflow/service_v1/services/environments/async_client.py index fe7be45..22972a4 100644 --- a/google/cloud/orchestration/airflow/service_v1/services/environments/async_client.py +++ b/google/cloud/orchestration/airflow/service_v1/services/environments/async_client.py @@ -16,7 +16,17 @@ from collections import OrderedDict import functools import re -from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 @@ -161,9 +171,9 @@ def transport(self) -> EnvironmentsTransport: def __init__( self, *, - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, transport: Union[str, EnvironmentsTransport] = "grpc_asyncio", - client_options: ClientOptions = None, + client_options: Optional[ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiates the environments client. @@ -207,12 +217,12 @@ def __init__( async def create_environment( self, - request: Union[environments.CreateEnvironmentRequest, dict] = None, + request: Optional[Union[environments.CreateEnvironmentRequest, dict]] = None, *, - parent: str = None, - environment: environments.Environment = None, + parent: Optional[str] = None, + environment: Optional[environments.Environment] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: r"""Create a new environment. @@ -247,7 +257,7 @@ async def sample_create_environment(): print(response) Args: - request (Union[google.cloud.orchestration.airflow.service_v1.types.CreateEnvironmentRequest, dict]): + request (Optional[Union[google.cloud.orchestration.airflow.service_v1.types.CreateEnvironmentRequest, dict]]): The request object. Create a new environment. parent (:class:`str`): The parent must be of the form @@ -330,11 +340,11 @@ async def sample_create_environment(): async def get_environment( self, - request: Union[environments.GetEnvironmentRequest, dict] = None, + request: Optional[Union[environments.GetEnvironmentRequest, dict]] = None, *, - name: str = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> environments.Environment: r"""Get an existing environment. @@ -365,7 +375,7 @@ async def sample_get_environment(): print(response) Args: - request (Union[google.cloud.orchestration.airflow.service_v1.types.GetEnvironmentRequest, dict]): + request (Optional[Union[google.cloud.orchestration.airflow.service_v1.types.GetEnvironmentRequest, dict]]): The request object. Get an environment. 
name (:class:`str`): The resource name of the environment @@ -431,11 +441,11 @@ async def sample_get_environment(): async def list_environments( self, - request: Union[environments.ListEnvironmentsRequest, dict] = None, + request: Optional[Union[environments.ListEnvironmentsRequest, dict]] = None, *, - parent: str = None, + parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListEnvironmentsAsyncPager: r"""List environments. @@ -467,7 +477,7 @@ async def sample_list_environments(): print(response) Args: - request (Union[google.cloud.orchestration.airflow.service_v1.types.ListEnvironmentsRequest, dict]): + request (Optional[Union[google.cloud.orchestration.airflow.service_v1.types.ListEnvironmentsRequest, dict]]): The request object. List environments in a project and location. parent (:class:`str`): @@ -546,13 +556,13 @@ async def sample_list_environments(): async def update_environment( self, - request: Union[environments.UpdateEnvironmentRequest, dict] = None, + request: Optional[Union[environments.UpdateEnvironmentRequest, dict]] = None, *, - name: str = None, - environment: environments.Environment = None, - update_mask: field_mask_pb2.FieldMask = None, + name: Optional[str] = None, + environment: Optional[environments.Environment] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: r"""Update an environment. @@ -587,7 +597,7 @@ async def sample_update_environment(): print(response) Args: - request (Union[google.cloud.orchestration.airflow.service_v1.types.UpdateEnvironmentRequest, dict]): + request (Optional[Union[google.cloud.orchestration.airflow.service_v1.types.UpdateEnvironmentRequest, dict]]): The request object. Update an environment. name (:class:`str`): The relative resource name of the @@ -843,11 +853,11 @@ async def sample_update_environment(): async def delete_environment( self, - request: Union[environments.DeleteEnvironmentRequest, dict] = None, + request: Optional[Union[environments.DeleteEnvironmentRequest, dict]] = None, *, - name: str = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: r"""Delete an environment. @@ -882,7 +892,7 @@ async def sample_delete_environment(): print(response) Args: - request (Union[google.cloud.orchestration.airflow.service_v1.types.DeleteEnvironmentRequest, dict]): + request (Optional[Union[google.cloud.orchestration.airflow.service_v1.types.DeleteEnvironmentRequest, dict]]): The request object. Delete an environment. 
name (:class:`str`): The environment to delete, in the diff --git a/google/cloud/orchestration/airflow/service_v1/services/environments/client.py b/google/cloud/orchestration/airflow/service_v1/services/environments/client.py index 6c3b969..8dd9f4b 100644 --- a/google/cloud/orchestration/airflow/service_v1/services/environments/client.py +++ b/google/cloud/orchestration/airflow/service_v1/services/environments/client.py @@ -16,7 +16,18 @@ from collections import OrderedDict import os import re -from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) from google.api_core import client_options as client_options_lib from google.api_core import exceptions as core_exceptions @@ -62,7 +73,7 @@ class EnvironmentsClientMeta(type): def get_transport_class( cls, - label: str = None, + label: Optional[str] = None, ) -> Type[EnvironmentsTransport]: """Returns an appropriate transport class. @@ -339,8 +350,8 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, EnvironmentsTransport, None] = None, - client_options: Optional[client_options_lib.ClientOptions] = None, + transport: Optional[Union[str, EnvironmentsTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiates the environments client. @@ -354,7 +365,7 @@ def __init__( transport (Union[str, EnvironmentsTransport]): The transport to use. If set to None, a transport is chosen automatically. - client_options (google.api_core.client_options.ClientOptions): Custom options for the + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the client. It won't take effect if a ``transport`` instance is provided. (1) The ``api_endpoint`` property can be used to override the default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT @@ -384,6 +395,7 @@ def __init__( client_options = client_options_lib.from_dict(client_options) if client_options is None: client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( client_options @@ -436,12 +448,12 @@ def __init__( def create_environment( self, - request: Union[environments.CreateEnvironmentRequest, dict] = None, + request: Optional[Union[environments.CreateEnvironmentRequest, dict]] = None, *, - parent: str = None, - environment: environments.Environment = None, + parent: Optional[str] = None, + environment: Optional[environments.Environment] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: r"""Create a new environment. @@ -559,11 +571,11 @@ def sample_create_environment(): def get_environment( self, - request: Union[environments.GetEnvironmentRequest, dict] = None, + request: Optional[Union[environments.GetEnvironmentRequest, dict]] = None, *, - name: str = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> environments.Environment: r"""Get an existing environment. 
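# Illustrative sketch, not part of the generated patch: the regenerated v1
# Environments client above now marks the flattened arguments (request, name,
# parent, environment, timeout) as Optional and annotates client_options as
# accepting a dict in addition to ClientOptions. The project, location, and
# environment IDs below are placeholders.
from google.cloud.orchestration.airflow import service_v1

def get_environment_example():
    # client_options may be given as a plain dict; it is converted internally
    # via client_options_lib.from_dict (see the cast added in the diff above).
    client = service_v1.EnvironmentsClient(
        client_options={"api_endpoint": "composer.googleapis.com"}
    )
    # Flattened parameters default to None, so only the needed field is passed.
    return client.get_environment(
        name="projects/my-project/locations/us-central1/environments/my-env"
    )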
@@ -660,11 +672,11 @@ def sample_get_environment(): def list_environments( self, - request: Union[environments.ListEnvironmentsRequest, dict] = None, + request: Optional[Union[environments.ListEnvironmentsRequest, dict]] = None, *, - parent: str = None, + parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListEnvironmentsPager: r"""List environments. @@ -775,13 +787,13 @@ def sample_list_environments(): def update_environment( self, - request: Union[environments.UpdateEnvironmentRequest, dict] = None, + request: Optional[Union[environments.UpdateEnvironmentRequest, dict]] = None, *, - name: str = None, - environment: environments.Environment = None, - update_mask: field_mask_pb2.FieldMask = None, + name: Optional[str] = None, + environment: Optional[environments.Environment] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: r"""Update an environment. @@ -1072,11 +1084,11 @@ def sample_update_environment(): def delete_environment( self, - request: Union[environments.DeleteEnvironmentRequest, dict] = None, + request: Optional[Union[environments.DeleteEnvironmentRequest, dict]] = None, *, - name: str = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: r"""Delete an environment. diff --git a/google/cloud/orchestration/airflow/service_v1/services/environments/transports/base.py b/google/cloud/orchestration/airflow/service_v1/services/environments/transports/base.py index 4d7419b..8857232 100644 --- a/google/cloud/orchestration/airflow/service_v1/services/environments/transports/base.py +++ b/google/cloud/orchestration/airflow/service_v1/services/environments/transports/base.py @@ -49,7 +49,7 @@ def __init__( self, *, host: str = DEFAULT_HOST, - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, diff --git a/google/cloud/orchestration/airflow/service_v1/services/environments/transports/grpc.py b/google/cloud/orchestration/airflow/service_v1/services/environments/transports/grpc.py index 48588d9..5c43736 100644 --- a/google/cloud/orchestration/airflow/service_v1/services/environments/transports/grpc.py +++ b/google/cloud/orchestration/airflow/service_v1/services/environments/transports/grpc.py @@ -47,14 +47,14 @@ def __init__( self, *, host: str = "composer.googleapis.com", - credentials: ga_credentials.Credentials = None, - credentials_file: str = None, - scopes: Sequence[str] = None, - channel: grpc.Channel = None, - api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - ssl_channel_credentials: grpc.ChannelCredentials = None, - client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = 
None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, @@ -182,8 +182,8 @@ def __init__( def create_channel( cls, host: str = "composer.googleapis.com", - credentials: ga_credentials.Credentials = None, - credentials_file: str = None, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, **kwargs, diff --git a/google/cloud/orchestration/airflow/service_v1/services/environments/transports/grpc_asyncio.py b/google/cloud/orchestration/airflow/service_v1/services/environments/transports/grpc_asyncio.py index f3d31e1..d14cbb1 100644 --- a/google/cloud/orchestration/airflow/service_v1/services/environments/transports/grpc_asyncio.py +++ b/google/cloud/orchestration/airflow/service_v1/services/environments/transports/grpc_asyncio.py @@ -49,7 +49,7 @@ class EnvironmentsGrpcAsyncIOTransport(EnvironmentsTransport): def create_channel( cls, host: str = "composer.googleapis.com", - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -92,15 +92,15 @@ def __init__( self, *, host: str = "composer.googleapis.com", - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, - channel: aio.Channel = None, - api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - ssl_channel_credentials: grpc.ChannelCredentials = None, - client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, - quota_project_id=None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, api_audience: Optional[str] = None, diff --git a/google/cloud/orchestration/airflow/service_v1/services/image_versions/async_client.py b/google/cloud/orchestration/airflow/service_v1/services/image_versions/async_client.py index 0f309ae..9143806 100644 --- a/google/cloud/orchestration/airflow/service_v1/services/image_versions/async_client.py +++ b/google/cloud/orchestration/airflow/service_v1/services/image_versions/async_client.py @@ -16,7 +16,17 @@ from collections import OrderedDict import functools import re -from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 @@ -157,9 +167,9 @@ def transport(self) -> ImageVersionsTransport: def __init__( self, *, - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, transport: 
Union[str, ImageVersionsTransport] = "grpc_asyncio", - client_options: ClientOptions = None, + client_options: Optional[ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiates the image versions client. @@ -203,11 +213,11 @@ def __init__( async def list_image_versions( self, - request: Union[image_versions.ListImageVersionsRequest, dict] = None, + request: Optional[Union[image_versions.ListImageVersionsRequest, dict]] = None, *, - parent: str = None, + parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListImageVersionsAsyncPager: r"""List ImageVersions for provided location. @@ -239,7 +249,7 @@ async def sample_list_image_versions(): print(response) Args: - request (Union[google.cloud.orchestration.airflow.service_v1.types.ListImageVersionsRequest, dict]): + request (Optional[Union[google.cloud.orchestration.airflow.service_v1.types.ListImageVersionsRequest, dict]]): The request object. List ImageVersions in a project and location. parent (:class:`str`): diff --git a/google/cloud/orchestration/airflow/service_v1/services/image_versions/client.py b/google/cloud/orchestration/airflow/service_v1/services/image_versions/client.py index b6221fd..7f59bd3 100644 --- a/google/cloud/orchestration/airflow/service_v1/services/image_versions/client.py +++ b/google/cloud/orchestration/airflow/service_v1/services/image_versions/client.py @@ -16,7 +16,18 @@ from collections import OrderedDict import os import re -from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) from google.api_core import client_options as client_options_lib from google.api_core import exceptions as core_exceptions @@ -56,7 +67,7 @@ class ImageVersionsClientMeta(type): def get_transport_class( cls, - label: str = None, + label: Optional[str] = None, ) -> Type[ImageVersionsTransport]: """Returns an appropriate transport class. @@ -309,8 +320,8 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, ImageVersionsTransport, None] = None, - client_options: Optional[client_options_lib.ClientOptions] = None, + transport: Optional[Union[str, ImageVersionsTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiates the image versions client. @@ -324,7 +335,7 @@ def __init__( transport (Union[str, ImageVersionsTransport]): The transport to use. If set to None, a transport is chosen automatically. - client_options (google.api_core.client_options.ClientOptions): Custom options for the + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the client. It won't take effect if a ``transport`` instance is provided. (1) The ``api_endpoint`` property can be used to override the default endpoint provided by the client. 
GOOGLE_API_USE_MTLS_ENDPOINT @@ -354,6 +365,7 @@ def __init__( client_options = client_options_lib.from_dict(client_options) if client_options is None: client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( client_options @@ -406,11 +418,11 @@ def __init__( def list_image_versions( self, - request: Union[image_versions.ListImageVersionsRequest, dict] = None, + request: Optional[Union[image_versions.ListImageVersionsRequest, dict]] = None, *, - parent: str = None, + parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListImageVersionsPager: r"""List ImageVersions for provided location. diff --git a/google/cloud/orchestration/airflow/service_v1/services/image_versions/transports/base.py b/google/cloud/orchestration/airflow/service_v1/services/image_versions/transports/base.py index 4b0e33f..733dbe2 100644 --- a/google/cloud/orchestration/airflow/service_v1/services/image_versions/transports/base.py +++ b/google/cloud/orchestration/airflow/service_v1/services/image_versions/transports/base.py @@ -48,7 +48,7 @@ def __init__( self, *, host: str = DEFAULT_HOST, - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, diff --git a/google/cloud/orchestration/airflow/service_v1/services/image_versions/transports/grpc.py b/google/cloud/orchestration/airflow/service_v1/services/image_versions/transports/grpc.py index 7724ebd..01df430 100644 --- a/google/cloud/orchestration/airflow/service_v1/services/image_versions/transports/grpc.py +++ b/google/cloud/orchestration/airflow/service_v1/services/image_versions/transports/grpc.py @@ -46,14 +46,14 @@ def __init__( self, *, host: str = "composer.googleapis.com", - credentials: ga_credentials.Credentials = None, - credentials_file: str = None, - scopes: Sequence[str] = None, - channel: grpc.Channel = None, - api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - ssl_channel_credentials: grpc.ChannelCredentials = None, - client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, @@ -180,8 +180,8 @@ def __init__( def create_channel( cls, host: str = "composer.googleapis.com", - credentials: ga_credentials.Credentials = None, - credentials_file: str = None, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, **kwargs, diff --git a/google/cloud/orchestration/airflow/service_v1/services/image_versions/transports/grpc_asyncio.py 
b/google/cloud/orchestration/airflow/service_v1/services/image_versions/transports/grpc_asyncio.py index d27546a..7a63862 100644 --- a/google/cloud/orchestration/airflow/service_v1/services/image_versions/transports/grpc_asyncio.py +++ b/google/cloud/orchestration/airflow/service_v1/services/image_versions/transports/grpc_asyncio.py @@ -48,7 +48,7 @@ class ImageVersionsGrpcAsyncIOTransport(ImageVersionsTransport): def create_channel( cls, host: str = "composer.googleapis.com", - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -91,15 +91,15 @@ def __init__( self, *, host: str = "composer.googleapis.com", - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, - channel: aio.Channel = None, - api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - ssl_channel_credentials: grpc.ChannelCredentials = None, - client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, - quota_project_id=None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, api_audience: Optional[str] = None, diff --git a/google/cloud/orchestration/airflow/service_v1/types/environments.py b/google/cloud/orchestration/airflow/service_v1/types/environments.py index 5a24681..e29369d 100644 --- a/google/cloud/orchestration/airflow/service_v1/types/environments.py +++ b/google/cloud/orchestration/airflow/service_v1/types/environments.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from typing import MutableMapping, MutableSequence + from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore import proto # type: ignore @@ -53,11 +55,11 @@ class CreateEnvironmentRequest(proto.Message): The environment to create. """ - parent = proto.Field( + parent: str = proto.Field( proto.STRING, number=1, ) - environment = proto.Field( + environment: "Environment" = proto.Field( proto.MESSAGE, number=2, message="Environment", @@ -74,7 +76,7 @@ class GetEnvironmentRequest(proto.Message): "projects/{projectId}/locations/{locationId}/environments/{environmentId}". """ - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) @@ -95,15 +97,15 @@ class ListEnvironmentsRequest(proto.Message): request, if any. """ - parent = proto.Field( + parent: str = proto.Field( proto.STRING, number=1, ) - page_size = proto.Field( + page_size: int = proto.Field( proto.INT32, number=2, ) - page_token = proto.Field( + page_token: str = proto.Field( proto.STRING, number=3, ) @@ -113,7 +115,7 @@ class ListEnvironmentsResponse(proto.Message): r"""The environments in a project and location. 
Attributes: - environments (Sequence[google.cloud.orchestration.airflow.service_v1.types.Environment]): + environments (MutableSequence[google.cloud.orchestration.airflow.service_v1.types.Environment]): The list of environments returned by a ListEnvironmentsRequest. next_page_token (str): @@ -125,12 +127,12 @@ class ListEnvironmentsResponse(proto.Message): def raw_page(self): return self - environments = proto.RepeatedField( + environments: MutableSequence["Environment"] = proto.RepeatedField( proto.MESSAGE, number=1, message="Environment", ) - next_page_token = proto.Field( + next_page_token: str = proto.Field( proto.STRING, number=2, ) @@ -145,7 +147,7 @@ class DeleteEnvironmentRequest(proto.Message): "projects/{projectId}/locations/{locationId}/environments/{environmentId}". """ - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) @@ -317,16 +319,16 @@ class UpdateEnvironmentRequest(proto.Message): variables. """ - name = proto.Field( + name: str = proto.Field( proto.STRING, number=2, ) - environment = proto.Field( + environment: "Environment" = proto.Field( proto.MESSAGE, number=1, message="Environment", ) - update_mask = proto.Field( + update_mask: field_mask_pb2.FieldMask = proto.Field( proto.MESSAGE, number=3, message=field_mask_pb2.FieldMask, @@ -383,54 +385,54 @@ class EnvironmentConfig(proto.Message): interface `__). """ - gke_cluster = proto.Field( + gke_cluster: str = proto.Field( proto.STRING, number=1, ) - dag_gcs_prefix = proto.Field( + dag_gcs_prefix: str = proto.Field( proto.STRING, number=2, ) - node_count = proto.Field( + node_count: int = proto.Field( proto.INT32, number=3, ) - software_config = proto.Field( + software_config: "SoftwareConfig" = proto.Field( proto.MESSAGE, number=4, message="SoftwareConfig", ) - node_config = proto.Field( + node_config: "NodeConfig" = proto.Field( proto.MESSAGE, number=5, message="NodeConfig", ) - private_environment_config = proto.Field( + private_environment_config: "PrivateEnvironmentConfig" = proto.Field( proto.MESSAGE, number=7, message="PrivateEnvironmentConfig", ) - web_server_network_access_control = proto.Field( + web_server_network_access_control: "WebServerNetworkAccessControl" = proto.Field( proto.MESSAGE, number=8, message="WebServerNetworkAccessControl", ) - database_config = proto.Field( + database_config: "DatabaseConfig" = proto.Field( proto.MESSAGE, number=9, message="DatabaseConfig", ) - web_server_config = proto.Field( + web_server_config: "WebServerConfig" = proto.Field( proto.MESSAGE, number=10, message="WebServerConfig", ) - encryption_config = proto.Field( + encryption_config: "EncryptionConfig" = proto.Field( proto.MESSAGE, number=11, message="EncryptionConfig", ) - airflow_uri = proto.Field( + airflow_uri: str = proto.Field( proto.STRING, number=6, ) @@ -441,7 +443,7 @@ class WebServerNetworkAccessControl(proto.Message): server. Attributes: - allowed_ip_ranges (Sequence[google.cloud.orchestration.airflow.service_v1.types.WebServerNetworkAccessControl.AllowedIpRange]): + allowed_ip_ranges (MutableSequence[google.cloud.orchestration.airflow.service_v1.types.WebServerNetworkAccessControl.AllowedIpRange]): A collection of allowed IP ranges with descriptions. """ @@ -465,16 +467,16 @@ class AllowedIpRange(proto.Message): contain at most 300 characters. 
""" - value = proto.Field( + value: str = proto.Field( proto.STRING, number=1, ) - description = proto.Field( + description: str = proto.Field( proto.STRING, number=2, ) - allowed_ip_ranges = proto.RepeatedField( + allowed_ip_ranges: MutableSequence[AllowedIpRange] = proto.RepeatedField( proto.MESSAGE, number=1, message=AllowedIpRange, @@ -494,7 +496,7 @@ class DatabaseConfig(proto.Message): specified, db-n1-standard-2 will be used. """ - machine_type = proto.Field( + machine_type: str = proto.Field( proto.STRING, number=1, ) @@ -516,7 +518,7 @@ class WebServerConfig(proto.Message): manually changed to a non-standard values. """ - machine_type = proto.Field( + machine_type: str = proto.Field( proto.STRING, number=1, ) @@ -534,7 +536,7 @@ class EncryptionConfig(proto.Message): Google-managed key will be used. """ - kms_key_name = proto.Field( + kms_key_name: str = proto.Field( proto.STRING, number=1, ) @@ -569,7 +571,7 @@ class SoftwareConfig(proto.Message): See also `Version List `__. - airflow_config_overrides (Mapping[str, str]): + airflow_config_overrides (MutableMapping[str, str]): Optional. Apache Airflow configuration properties to override. @@ -589,7 +591,7 @@ class SoftwareConfig(proto.Message): Certain Apache Airflow configuration property values are `blocked `__, and cannot be overridden. - pypi_packages (Mapping[str, str]): + pypi_packages (MutableMapping[str, str]): Optional. Custom Python Package Index (PyPI) packages to be installed in the environment. @@ -598,7 +600,7 @@ class SoftwareConfig(proto.Message): as "==1.12.0", "[devel,gcp_api]", or "[devel]>=1.8.2, <1.9.2". To specify a package without pinning it to a version specifier, use the empty string as the value. - env_variables (Mapping[str, str]): + env_variables (MutableMapping[str, str]): Optional. Additional environment variables to provide to the Apache Airflow scheduler, worker, and webserver processes. @@ -629,26 +631,26 @@ class SoftwareConfig(proto.Message): default is '3'. Cannot be updated. """ - image_version = proto.Field( + image_version: str = proto.Field( proto.STRING, number=1, ) - airflow_config_overrides = proto.MapField( + airflow_config_overrides: MutableMapping[str, str] = proto.MapField( proto.STRING, proto.STRING, number=2, ) - pypi_packages = proto.MapField( + pypi_packages: MutableMapping[str, str] = proto.MapField( proto.STRING, proto.STRING, number=3, ) - env_variables = proto.MapField( + env_variables: MutableMapping[str, str] = proto.MapField( proto.STRING, proto.STRING, number=4, ) - python_version = proto.Field( + python_version: str = proto.Field( proto.STRING, number=6, ) @@ -727,26 +729,26 @@ class IPAllocationPolicy(proto.Message): This field is a member of `oneof`_ ``services_ip_allocation``. """ - use_ip_aliases = proto.Field( + use_ip_aliases: bool = proto.Field( proto.BOOL, number=1, ) - cluster_secondary_range_name = proto.Field( + cluster_secondary_range_name: str = proto.Field( proto.STRING, number=2, oneof="cluster_ip_allocation", ) - cluster_ipv4_cidr_block = proto.Field( + cluster_ipv4_cidr_block: str = proto.Field( proto.STRING, number=4, oneof="cluster_ip_allocation", ) - services_secondary_range_name = proto.Field( + services_secondary_range_name: str = proto.Field( proto.STRING, number=3, oneof="services_ip_allocation", ) - services_ipv4_cidr_block = proto.Field( + services_ipv4_cidr_block: str = proto.Field( proto.STRING, number=5, oneof="services_ip_allocation", @@ -827,7 +829,7 @@ class NodeConfig(proto.Message): Optional. The disk size in GB used for node VMs. 
Minimum size is 20GB. If unspecified, defaults to 100GB. Cannot be updated. - oauth_scopes (Sequence[str]): + oauth_scopes (MutableSequence[str]): Optional. The set of Google API scopes to be made available on all node VMs. If ``oauth_scopes`` is empty, defaults to ["https://www.googleapis.com/auth/cloud-platform"]. Cannot @@ -838,7 +840,7 @@ class NodeConfig(proto.Message): account is not specified, the "default" Compute Engine service account is used. Cannot be updated. - tags (Sequence[str]): + tags (MutableSequence[str]): Optional. The list of instance tags applied to all node VMs. Tags are used to identify valid sources or targets for network firewalls. Each tag within the list must comply with @@ -849,39 +851,39 @@ class NodeConfig(proto.Message): how IPs are allocated in the GKE cluster. """ - location = proto.Field( + location: str = proto.Field( proto.STRING, number=1, ) - machine_type = proto.Field( + machine_type: str = proto.Field( proto.STRING, number=2, ) - network = proto.Field( + network: str = proto.Field( proto.STRING, number=3, ) - subnetwork = proto.Field( + subnetwork: str = proto.Field( proto.STRING, number=4, ) - disk_size_gb = proto.Field( + disk_size_gb: int = proto.Field( proto.INT32, number=5, ) - oauth_scopes = proto.RepeatedField( + oauth_scopes: MutableSequence[str] = proto.RepeatedField( proto.STRING, number=6, ) - service_account = proto.Field( + service_account: str = proto.Field( proto.STRING, number=7, ) - tags = proto.RepeatedField( + tags: MutableSequence[str] = proto.RepeatedField( proto.STRING, number=8, ) - ip_allocation_policy = proto.Field( + ip_allocation_policy: "IPAllocationPolicy" = proto.Field( proto.MESSAGE, number=9, message="IPAllocationPolicy", @@ -911,15 +913,15 @@ class PrivateClusterConfig(proto.Message): within the cluster's network. """ - enable_private_endpoint = proto.Field( + enable_private_endpoint: bool = proto.Field( proto.BOOL, number=1, ) - master_ipv4_cidr_block = proto.Field( + master_ipv4_cidr_block: str = proto.Field( proto.STRING, number=2, ) - master_ipv4_reserved_range = proto.Field( + master_ipv4_reserved_range: str = proto.Field( proto.STRING, number=3, ) @@ -952,24 +954,24 @@ class PrivateEnvironmentConfig(proto.Message): tenant project's App Engine VMs. """ - enable_private_environment = proto.Field( + enable_private_environment: bool = proto.Field( proto.BOOL, number=1, ) - private_cluster_config = proto.Field( + private_cluster_config: "PrivateClusterConfig" = proto.Field( proto.MESSAGE, number=2, message="PrivateClusterConfig", ) - web_server_ipv4_cidr_block = proto.Field( + web_server_ipv4_cidr_block: str = proto.Field( proto.STRING, number=3, ) - cloud_sql_ipv4_cidr_block = proto.Field( + cloud_sql_ipv4_cidr_block: str = proto.Field( proto.STRING, number=4, ) - web_server_ipv4_reserved_range = proto.Field( + web_server_ipv4_reserved_range: str = proto.Field( proto.STRING, number=5, ) @@ -1002,7 +1004,7 @@ class Environment(proto.Message): update_time (google.protobuf.timestamp_pb2.Timestamp): Output only. The time at which this environment was last modified. - labels (Mapping[str, str]): + labels (MutableMapping[str, str]): Optional. User-defined labels for this environment. The labels map can contain no more than 64 entries. 
Entries of the labels map are UTF8 strings that comply with the @@ -1025,35 +1027,35 @@ class State(proto.Enum): DELETING = 4 ERROR = 5 - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) - config = proto.Field( + config: "EnvironmentConfig" = proto.Field( proto.MESSAGE, number=2, message="EnvironmentConfig", ) - uuid = proto.Field( + uuid: str = proto.Field( proto.STRING, number=3, ) - state = proto.Field( + state: State = proto.Field( proto.ENUM, number=4, enum=State, ) - create_time = proto.Field( + create_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=5, message=timestamp_pb2.Timestamp, ) - update_time = proto.Field( + update_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=6, message=timestamp_pb2.Timestamp, ) - labels = proto.MapField( + labels: MutableMapping[str, str] = proto.MapField( proto.STRING, proto.STRING, number=7, @@ -1078,7 +1080,7 @@ class CheckUpgradeResponse(proto.Message): image_version (str): Composer image for which the build was happening. - pypi_dependencies (Mapping[str, str]): + pypi_dependencies (MutableMapping[str, str]): Pypi dependencies specified in the environment configuration, at the time when the build was triggered. @@ -1092,24 +1094,24 @@ class ConflictResult(proto.Enum): CONFLICT = 1 NO_CONFLICT = 2 - build_log_uri = proto.Field( + build_log_uri: str = proto.Field( proto.STRING, number=1, ) - contains_pypi_modules_conflict = proto.Field( + contains_pypi_modules_conflict: ConflictResult = proto.Field( proto.ENUM, number=4, enum=ConflictResult, ) - pypi_conflict_build_log_extract = proto.Field( + pypi_conflict_build_log_extract: str = proto.Field( proto.STRING, number=3, ) - image_version = proto.Field( + image_version: str = proto.Field( proto.STRING, number=5, ) - pypi_dependencies = proto.MapField( + pypi_dependencies: MutableMapping[str, str] = proto.MapField( proto.STRING, proto.STRING, number=6, diff --git a/google/cloud/orchestration/airflow/service_v1/types/image_versions.py b/google/cloud/orchestration/airflow/service_v1/types/image_versions.py index 0344378..d338b01 100644 --- a/google/cloud/orchestration/airflow/service_v1/types/image_versions.py +++ b/google/cloud/orchestration/airflow/service_v1/types/image_versions.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from typing import MutableMapping, MutableSequence + from google.type import date_pb2 # type: ignore import proto # type: ignore @@ -44,19 +46,19 @@ class ListImageVersionsRequest(proto.Message): releases should be included. """ - parent = proto.Field( + parent: str = proto.Field( proto.STRING, number=1, ) - page_size = proto.Field( + page_size: int = proto.Field( proto.INT32, number=2, ) - page_token = proto.Field( + page_token: str = proto.Field( proto.STRING, number=3, ) - include_past_releases = proto.Field( + include_past_releases: bool = proto.Field( proto.BOOL, number=4, ) @@ -66,7 +68,7 @@ class ListImageVersionsResponse(proto.Message): r"""The ImageVersions in a project and location. Attributes: - image_versions (Sequence[google.cloud.orchestration.airflow.service_v1.types.ImageVersion]): + image_versions (MutableSequence[google.cloud.orchestration.airflow.service_v1.types.ImageVersion]): The list of supported ImageVersions in a location. 
next_page_token (str): @@ -78,12 +80,12 @@ class ListImageVersionsResponse(proto.Message): def raw_page(self): return self - image_versions = proto.RepeatedField( + image_versions: MutableSequence["ImageVersion"] = proto.RepeatedField( proto.MESSAGE, number=1, message="ImageVersion", ) - next_page_token = proto.Field( + next_page_token: str = proto.Field( proto.STRING, number=2, ) @@ -100,7 +102,7 @@ class ImageVersion(proto.Message): Whether this is the default ImageVersion used by Composer during environment creation if no input ImageVersion is specified. - supported_python_versions (Sequence[str]): + supported_python_versions (MutableSequence[str]): supported python versions release_date (google.type.date_pb2.Date): The date of the version release. @@ -112,28 +114,28 @@ class ImageVersion(proto.Message): environment running with the image version. """ - image_version_id = proto.Field( + image_version_id: str = proto.Field( proto.STRING, number=1, ) - is_default = proto.Field( + is_default: bool = proto.Field( proto.BOOL, number=2, ) - supported_python_versions = proto.RepeatedField( + supported_python_versions: MutableSequence[str] = proto.RepeatedField( proto.STRING, number=3, ) - release_date = proto.Field( + release_date: date_pb2.Date = proto.Field( proto.MESSAGE, number=4, message=date_pb2.Date, ) - creation_disabled = proto.Field( + creation_disabled: bool = proto.Field( proto.BOOL, number=5, ) - upgrade_disabled = proto.Field( + upgrade_disabled: bool = proto.Field( proto.BOOL, number=6, ) diff --git a/google/cloud/orchestration/airflow/service_v1/types/operations.py b/google/cloud/orchestration/airflow/service_v1/types/operations.py index ac031d7..dbafb35 100644 --- a/google/cloud/orchestration/airflow/service_v1/types/operations.py +++ b/google/cloud/orchestration/airflow/service_v1/types/operations.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from typing import MutableMapping, MutableSequence + from google.protobuf import timestamp_pb2 # type: ignore import proto # type: ignore @@ -68,30 +70,30 @@ class Type(proto.Enum): UPDATE = 3 CHECK = 4 - state = proto.Field( + state: State = proto.Field( proto.ENUM, number=1, enum=State, ) - operation_type = proto.Field( + operation_type: Type = proto.Field( proto.ENUM, number=2, enum=Type, ) - resource = proto.Field( + resource: str = proto.Field( proto.STRING, number=3, ) - resource_uuid = proto.Field( + resource_uuid: str = proto.Field( proto.STRING, number=4, ) - create_time = proto.Field( + create_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=5, message=timestamp_pb2.Timestamp, ) - end_time = proto.Field( + end_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=6, message=timestamp_pb2.Timestamp, diff --git a/google/cloud/orchestration/airflow/service_v1beta1/__init__.py b/google/cloud/orchestration/airflow/service_v1beta1/__init__.py index 6616359..c4fdc7f 100644 --- a/google/cloud/orchestration/airflow/service_v1beta1/__init__.py +++ b/google/cloud/orchestration/airflow/service_v1beta1/__init__.py @@ -13,6 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +from google.cloud.orchestration.airflow.service import gapic_version as package_version + +__version__ = package_version.__version__ + from .services.environments import EnvironmentsAsyncClient, EnvironmentsClient from .services.image_versions import ImageVersionsAsyncClient, ImageVersionsClient diff --git a/google/cloud/orchestration/airflow/service_v1beta1/services/environments/async_client.py b/google/cloud/orchestration/airflow/service_v1beta1/services/environments/async_client.py index 7af697a..05b0464 100644 --- a/google/cloud/orchestration/airflow/service_v1beta1/services/environments/async_client.py +++ b/google/cloud/orchestration/airflow/service_v1beta1/services/environments/async_client.py @@ -16,7 +16,17 @@ from collections import OrderedDict import functools import re -from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 @@ -166,9 +176,9 @@ def transport(self) -> EnvironmentsTransport: def __init__( self, *, - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, transport: Union[str, EnvironmentsTransport] = "grpc_asyncio", - client_options: ClientOptions = None, + client_options: Optional[ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiates the environments client. @@ -212,12 +222,12 @@ def __init__( async def create_environment( self, - request: Union[environments.CreateEnvironmentRequest, dict] = None, + request: Optional[Union[environments.CreateEnvironmentRequest, dict]] = None, *, - parent: str = None, - environment: environments.Environment = None, + parent: Optional[str] = None, + environment: Optional[environments.Environment] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: r"""Create a new environment. @@ -252,7 +262,7 @@ async def sample_create_environment(): print(response) Args: - request (Union[google.cloud.orchestration.airflow.service_v1beta1.types.CreateEnvironmentRequest, dict]): + request (Optional[Union[google.cloud.orchestration.airflow.service_v1beta1.types.CreateEnvironmentRequest, dict]]): The request object. Create a new environment. parent (:class:`str`): The parent must be of the form @@ -335,11 +345,11 @@ async def sample_create_environment(): async def get_environment( self, - request: Union[environments.GetEnvironmentRequest, dict] = None, + request: Optional[Union[environments.GetEnvironmentRequest, dict]] = None, *, - name: str = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> environments.Environment: r"""Get an existing environment. @@ -370,7 +380,7 @@ async def sample_get_environment(): print(response) Args: - request (Union[google.cloud.orchestration.airflow.service_v1beta1.types.GetEnvironmentRequest, dict]): + request (Optional[Union[google.cloud.orchestration.airflow.service_v1beta1.types.GetEnvironmentRequest, dict]]): The request object. Get an environment. 
name (:class:`str`): The resource name of the environment @@ -436,11 +446,11 @@ async def sample_get_environment(): async def list_environments( self, - request: Union[environments.ListEnvironmentsRequest, dict] = None, + request: Optional[Union[environments.ListEnvironmentsRequest, dict]] = None, *, - parent: str = None, + parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListEnvironmentsAsyncPager: r"""List environments. @@ -472,7 +482,7 @@ async def sample_list_environments(): print(response) Args: - request (Union[google.cloud.orchestration.airflow.service_v1beta1.types.ListEnvironmentsRequest, dict]): + request (Optional[Union[google.cloud.orchestration.airflow.service_v1beta1.types.ListEnvironmentsRequest, dict]]): The request object. List environments in a project and location. parent (:class:`str`): @@ -551,13 +561,13 @@ async def sample_list_environments(): async def update_environment( self, - request: Union[environments.UpdateEnvironmentRequest, dict] = None, + request: Optional[Union[environments.UpdateEnvironmentRequest, dict]] = None, *, - name: str = None, - environment: environments.Environment = None, - update_mask: field_mask_pb2.FieldMask = None, + name: Optional[str] = None, + environment: Optional[environments.Environment] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: r"""Update an environment. @@ -592,7 +602,7 @@ async def sample_update_environment(): print(response) Args: - request (Union[google.cloud.orchestration.airflow.service_v1beta1.types.UpdateEnvironmentRequest, dict]): + request (Optional[Union[google.cloud.orchestration.airflow.service_v1beta1.types.UpdateEnvironmentRequest, dict]]): The request object. Update an environment. name (:class:`str`): The relative resource name of the @@ -868,11 +878,11 @@ async def sample_update_environment(): async def delete_environment( self, - request: Union[environments.DeleteEnvironmentRequest, dict] = None, + request: Optional[Union[environments.DeleteEnvironmentRequest, dict]] = None, *, - name: str = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: r"""Delete an environment. @@ -907,7 +917,7 @@ async def sample_delete_environment(): print(response) Args: - request (Union[google.cloud.orchestration.airflow.service_v1beta1.types.DeleteEnvironmentRequest, dict]): + request (Optional[Union[google.cloud.orchestration.airflow.service_v1beta1.types.DeleteEnvironmentRequest, dict]]): The request object. Delete an environment. name (:class:`str`): The environment to delete, in the @@ -991,10 +1001,10 @@ async def sample_delete_environment(): async def restart_web_server( self, - request: Union[environments.RestartWebServerRequest, dict] = None, + request: Optional[Union[environments.RestartWebServerRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: r"""Restart Airflow web server. 
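# Hedged sketch, not generated code: the v1beta1 async client receives the same
# Optional annotations as the v1 surface above. A minimal call using
# application-default credentials; the resource name is a placeholder.
import asyncio
from google.cloud.orchestration.airflow import service_v1beta1

async def get_environment_async_example():
    client = service_v1beta1.EnvironmentsAsyncClient()
    # Flattened arguments default to None; pass only what is needed.
    environment = await client.get_environment(
        name="projects/my-project/locations/us-central1/environments/my-env"
    )
    return environment

# Example driver (commented out): asyncio.run(get_environment_async_example())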
@@ -1029,7 +1039,7 @@ async def sample_restart_web_server(): print(response) Args: - request (Union[google.cloud.orchestration.airflow.service_v1beta1.types.RestartWebServerRequest, dict]): + request (Optional[Union[google.cloud.orchestration.airflow.service_v1beta1.types.RestartWebServerRequest, dict]]): The request object. Restart Airflow web server. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. @@ -1084,10 +1094,10 @@ async def sample_restart_web_server(): async def check_upgrade( self, - request: Union[environments.CheckUpgradeRequest, dict] = None, + request: Optional[Union[environments.CheckUpgradeRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: r"""Check if an upgrade operation on the environment will @@ -1125,7 +1135,7 @@ async def sample_check_upgrade(): print(response) Args: - request (Union[google.cloud.orchestration.airflow.service_v1beta1.types.CheckUpgradeRequest, dict]): + request (Optional[Union[google.cloud.orchestration.airflow.service_v1beta1.types.CheckUpgradeRequest, dict]]): The request object. Request to check whether image upgrade will succeed. retry (google.api_core.retry.Retry): Designation of what errors, if any, diff --git a/google/cloud/orchestration/airflow/service_v1beta1/services/environments/client.py b/google/cloud/orchestration/airflow/service_v1beta1/services/environments/client.py index ff6941e..fad5038 100644 --- a/google/cloud/orchestration/airflow/service_v1beta1/services/environments/client.py +++ b/google/cloud/orchestration/airflow/service_v1beta1/services/environments/client.py @@ -16,7 +16,18 @@ from collections import OrderedDict import os import re -from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) from google.api_core import client_options as client_options_lib from google.api_core import exceptions as core_exceptions @@ -67,7 +78,7 @@ class EnvironmentsClientMeta(type): def get_transport_class( cls, - label: str = None, + label: Optional[str] = None, ) -> Type[EnvironmentsTransport]: """Returns an appropriate transport class. @@ -344,8 +355,8 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, EnvironmentsTransport, None] = None, - client_options: Optional[client_options_lib.ClientOptions] = None, + transport: Optional[Union[str, EnvironmentsTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiates the environments client. @@ -359,7 +370,7 @@ def __init__( transport (Union[str, EnvironmentsTransport]): The transport to use. If set to None, a transport is chosen automatically. - client_options (google.api_core.client_options.ClientOptions): Custom options for the + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the client. It won't take effect if a ``transport`` instance is provided. (1) The ``api_endpoint`` property can be used to override the default endpoint provided by the client. 
GOOGLE_API_USE_MTLS_ENDPOINT @@ -389,6 +400,7 @@ def __init__( client_options = client_options_lib.from_dict(client_options) if client_options is None: client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( client_options @@ -441,12 +453,12 @@ def __init__( def create_environment( self, - request: Union[environments.CreateEnvironmentRequest, dict] = None, + request: Optional[Union[environments.CreateEnvironmentRequest, dict]] = None, *, - parent: str = None, - environment: environments.Environment = None, + parent: Optional[str] = None, + environment: Optional[environments.Environment] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: r"""Create a new environment. @@ -564,11 +576,11 @@ def sample_create_environment(): def get_environment( self, - request: Union[environments.GetEnvironmentRequest, dict] = None, + request: Optional[Union[environments.GetEnvironmentRequest, dict]] = None, *, - name: str = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> environments.Environment: r"""Get an existing environment. @@ -665,11 +677,11 @@ def sample_get_environment(): def list_environments( self, - request: Union[environments.ListEnvironmentsRequest, dict] = None, + request: Optional[Union[environments.ListEnvironmentsRequest, dict]] = None, *, - parent: str = None, + parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListEnvironmentsPager: r"""List environments. @@ -780,13 +792,13 @@ def sample_list_environments(): def update_environment( self, - request: Union[environments.UpdateEnvironmentRequest, dict] = None, + request: Optional[Union[environments.UpdateEnvironmentRequest, dict]] = None, *, - name: str = None, - environment: environments.Environment = None, - update_mask: field_mask_pb2.FieldMask = None, + name: Optional[str] = None, + environment: Optional[environments.Environment] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: r"""Update an environment. @@ -1097,11 +1109,11 @@ def sample_update_environment(): def delete_environment( self, - request: Union[environments.DeleteEnvironmentRequest, dict] = None, + request: Optional[Union[environments.DeleteEnvironmentRequest, dict]] = None, *, - name: str = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: r"""Delete an environment. 
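# Hedged sketch, not generated code: update_environment on the v1beta1 sync
# client keeps its flattened name/environment/update_mask parameters, now all
# Optional. The environment name and the update-mask path below are assumed
# placeholders for illustration only.
from google.protobuf import field_mask_pb2
from google.cloud.orchestration.airflow import service_v1beta1

def update_env_variables_example():
    client = service_v1beta1.EnvironmentsClient()
    # Typed proto-plus messages (see the annotated fields in types/environments.py)
    # can be built with keyword arguments, including map fields such as env_variables.
    new_environment = service_v1beta1.Environment(
        config=service_v1beta1.EnvironmentConfig(
            software_config=service_v1beta1.SoftwareConfig(
                env_variables={"FOO": "bar"}
            )
        )
    )
    operation = client.update_environment(
        name="projects/my-project/locations/us-central1/environments/my-env",
        environment=new_environment,
        update_mask=field_mask_pb2.FieldMask(
            paths=["config.software_config.env_variables"]  # assumed mask path
        ),
    )
    # update_environment returns a long-running operation; block for the result.
    return operation.result()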
@@ -1220,10 +1232,10 @@ def sample_delete_environment(): def restart_web_server( self, - request: Union[environments.RestartWebServerRequest, dict] = None, + request: Optional[Union[environments.RestartWebServerRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: r"""Restart Airflow web server. @@ -1314,10 +1326,10 @@ def sample_restart_web_server(): def check_upgrade( self, - request: Union[environments.CheckUpgradeRequest, dict] = None, + request: Optional[Union[environments.CheckUpgradeRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: r"""Check if an upgrade operation on the environment will diff --git a/google/cloud/orchestration/airflow/service_v1beta1/services/environments/transports/base.py b/google/cloud/orchestration/airflow/service_v1beta1/services/environments/transports/base.py index e6e12ed..fa7092c 100644 --- a/google/cloud/orchestration/airflow/service_v1beta1/services/environments/transports/base.py +++ b/google/cloud/orchestration/airflow/service_v1beta1/services/environments/transports/base.py @@ -49,7 +49,7 @@ def __init__( self, *, host: str = DEFAULT_HOST, - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, diff --git a/google/cloud/orchestration/airflow/service_v1beta1/services/environments/transports/grpc.py b/google/cloud/orchestration/airflow/service_v1beta1/services/environments/transports/grpc.py index 69fbe64..cef66e2 100644 --- a/google/cloud/orchestration/airflow/service_v1beta1/services/environments/transports/grpc.py +++ b/google/cloud/orchestration/airflow/service_v1beta1/services/environments/transports/grpc.py @@ -47,14 +47,14 @@ def __init__( self, *, host: str = "composer.googleapis.com", - credentials: ga_credentials.Credentials = None, - credentials_file: str = None, - scopes: Sequence[str] = None, - channel: grpc.Channel = None, - api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - ssl_channel_credentials: grpc.ChannelCredentials = None, - client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, @@ -182,8 +182,8 @@ def __init__( def create_channel( cls, host: str = "composer.googleapis.com", - credentials: ga_credentials.Credentials = None, - credentials_file: str = None, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, **kwargs, diff --git 
a/google/cloud/orchestration/airflow/service_v1beta1/services/environments/transports/grpc_asyncio.py b/google/cloud/orchestration/airflow/service_v1beta1/services/environments/transports/grpc_asyncio.py index 4c0d1f0..a3ac7d7 100644 --- a/google/cloud/orchestration/airflow/service_v1beta1/services/environments/transports/grpc_asyncio.py +++ b/google/cloud/orchestration/airflow/service_v1beta1/services/environments/transports/grpc_asyncio.py @@ -49,7 +49,7 @@ class EnvironmentsGrpcAsyncIOTransport(EnvironmentsTransport): def create_channel( cls, host: str = "composer.googleapis.com", - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -92,15 +92,15 @@ def __init__( self, *, host: str = "composer.googleapis.com", - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, - channel: aio.Channel = None, - api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - ssl_channel_credentials: grpc.ChannelCredentials = None, - client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, - quota_project_id=None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, api_audience: Optional[str] = None, diff --git a/google/cloud/orchestration/airflow/service_v1beta1/services/image_versions/async_client.py b/google/cloud/orchestration/airflow/service_v1beta1/services/image_versions/async_client.py index 1bf66a4..0bf4539 100644 --- a/google/cloud/orchestration/airflow/service_v1beta1/services/image_versions/async_client.py +++ b/google/cloud/orchestration/airflow/service_v1beta1/services/image_versions/async_client.py @@ -16,7 +16,17 @@ from collections import OrderedDict import functools import re -from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 @@ -159,9 +169,9 @@ def transport(self) -> ImageVersionsTransport: def __init__( self, *, - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, transport: Union[str, ImageVersionsTransport] = "grpc_asyncio", - client_options: ClientOptions = None, + client_options: Optional[ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiates the image versions client. 
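# Illustrative sketch, not part of the generated patch: listing image versions
# with the regenerated v1beta1 client. The parent below is a placeholder; the
# same pattern applies to ImageVersionsAsyncClient with `async for`.
from google.cloud.orchestration.airflow import service_v1beta1

def list_image_versions_example():
    client = service_v1beta1.ImageVersionsClient()
    pager = client.list_image_versions(
        parent="projects/my-project/locations/us-central1"
    )
    for image_version in pager:
        # image_version_id, is_default, and supported_python_versions are among
        # the ImageVersion fields annotated in types/image_versions.py above.
        print(image_version.image_version_id, image_version.is_default)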
@@ -205,11 +215,11 @@ def __init__( async def list_image_versions( self, - request: Union[image_versions.ListImageVersionsRequest, dict] = None, + request: Optional[Union[image_versions.ListImageVersionsRequest, dict]] = None, *, - parent: str = None, + parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListImageVersionsAsyncPager: r"""List ImageVersions for provided location. @@ -241,7 +251,7 @@ async def sample_list_image_versions(): print(response) Args: - request (Union[google.cloud.orchestration.airflow.service_v1beta1.types.ListImageVersionsRequest, dict]): + request (Optional[Union[google.cloud.orchestration.airflow.service_v1beta1.types.ListImageVersionsRequest, dict]]): The request object. List ImageVersions in a project and location. parent (:class:`str`): diff --git a/google/cloud/orchestration/airflow/service_v1beta1/services/image_versions/client.py b/google/cloud/orchestration/airflow/service_v1beta1/services/image_versions/client.py index f85918d..d733c4f 100644 --- a/google/cloud/orchestration/airflow/service_v1beta1/services/image_versions/client.py +++ b/google/cloud/orchestration/airflow/service_v1beta1/services/image_versions/client.py @@ -16,7 +16,18 @@ from collections import OrderedDict import os import re -from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) from google.api_core import client_options as client_options_lib from google.api_core import exceptions as core_exceptions @@ -58,7 +69,7 @@ class ImageVersionsClientMeta(type): def get_transport_class( cls, - label: str = None, + label: Optional[str] = None, ) -> Type[ImageVersionsTransport]: """Returns an appropriate transport class. @@ -311,8 +322,8 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, ImageVersionsTransport, None] = None, - client_options: Optional[client_options_lib.ClientOptions] = None, + transport: Optional[Union[str, ImageVersionsTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiates the image versions client. @@ -326,7 +337,7 @@ def __init__( transport (Union[str, ImageVersionsTransport]): The transport to use. If set to None, a transport is chosen automatically. - client_options (google.api_core.client_options.ClientOptions): Custom options for the + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the client. It won't take effect if a ``transport`` instance is provided. (1) The ``api_endpoint`` property can be used to override the default endpoint provided by the client. 
GOOGLE_API_USE_MTLS_ENDPOINT @@ -356,6 +367,7 @@ def __init__( client_options = client_options_lib.from_dict(client_options) if client_options is None: client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( client_options @@ -408,11 +420,11 @@ def __init__( def list_image_versions( self, - request: Union[image_versions.ListImageVersionsRequest, dict] = None, + request: Optional[Union[image_versions.ListImageVersionsRequest, dict]] = None, *, - parent: str = None, + parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListImageVersionsPager: r"""List ImageVersions for provided location. diff --git a/google/cloud/orchestration/airflow/service_v1beta1/services/image_versions/transports/base.py b/google/cloud/orchestration/airflow/service_v1beta1/services/image_versions/transports/base.py index c0f85f1..102d5b3 100644 --- a/google/cloud/orchestration/airflow/service_v1beta1/services/image_versions/transports/base.py +++ b/google/cloud/orchestration/airflow/service_v1beta1/services/image_versions/transports/base.py @@ -48,7 +48,7 @@ def __init__( self, *, host: str = DEFAULT_HOST, - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, diff --git a/google/cloud/orchestration/airflow/service_v1beta1/services/image_versions/transports/grpc.py b/google/cloud/orchestration/airflow/service_v1beta1/services/image_versions/transports/grpc.py index 504b3d8..48d3713 100644 --- a/google/cloud/orchestration/airflow/service_v1beta1/services/image_versions/transports/grpc.py +++ b/google/cloud/orchestration/airflow/service_v1beta1/services/image_versions/transports/grpc.py @@ -46,14 +46,14 @@ def __init__( self, *, host: str = "composer.googleapis.com", - credentials: ga_credentials.Credentials = None, - credentials_file: str = None, - scopes: Sequence[str] = None, - channel: grpc.Channel = None, - api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - ssl_channel_credentials: grpc.ChannelCredentials = None, - client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, @@ -180,8 +180,8 @@ def __init__( def create_channel( cls, host: str = "composer.googleapis.com", - credentials: ga_credentials.Credentials = None, - credentials_file: str = None, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, **kwargs, diff --git 
a/google/cloud/orchestration/airflow/service_v1beta1/services/image_versions/transports/grpc_asyncio.py b/google/cloud/orchestration/airflow/service_v1beta1/services/image_versions/transports/grpc_asyncio.py index b6ab133..87f8383 100644 --- a/google/cloud/orchestration/airflow/service_v1beta1/services/image_versions/transports/grpc_asyncio.py +++ b/google/cloud/orchestration/airflow/service_v1beta1/services/image_versions/transports/grpc_asyncio.py @@ -48,7 +48,7 @@ class ImageVersionsGrpcAsyncIOTransport(ImageVersionsTransport): def create_channel( cls, host: str = "composer.googleapis.com", - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -91,15 +91,15 @@ def __init__( self, *, host: str = "composer.googleapis.com", - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, - channel: aio.Channel = None, - api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - ssl_channel_credentials: grpc.ChannelCredentials = None, - client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, - quota_project_id=None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, api_audience: Optional[str] = None, diff --git a/google/cloud/orchestration/airflow/service_v1beta1/types/environments.py b/google/cloud/orchestration/airflow/service_v1beta1/types/environments.py index 5f4387d..c5f3293 100644 --- a/google/cloud/orchestration/airflow/service_v1beta1/types/environments.py +++ b/google/cloud/orchestration/airflow/service_v1beta1/types/environments.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from typing import MutableMapping, MutableSequence + from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore import proto # type: ignore @@ -57,11 +59,11 @@ class CreateEnvironmentRequest(proto.Message): The environment to create. """ - parent = proto.Field( + parent: str = proto.Field( proto.STRING, number=1, ) - environment = proto.Field( + environment: "Environment" = proto.Field( proto.MESSAGE, number=2, message="Environment", @@ -78,7 +80,7 @@ class GetEnvironmentRequest(proto.Message): "projects/{projectId}/locations/{locationId}/environments/{environmentId}". """ - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) @@ -99,15 +101,15 @@ class ListEnvironmentsRequest(proto.Message): request, if any. """ - parent = proto.Field( + parent: str = proto.Field( proto.STRING, number=1, ) - page_size = proto.Field( + page_size: int = proto.Field( proto.INT32, number=2, ) - page_token = proto.Field( + page_token: str = proto.Field( proto.STRING, number=3, ) @@ -117,7 +119,7 @@ class ListEnvironmentsResponse(proto.Message): r"""The environments in a project and location. 
Attributes: - environments (Sequence[google.cloud.orchestration.airflow.service_v1beta1.types.Environment]): + environments (MutableSequence[google.cloud.orchestration.airflow.service_v1beta1.types.Environment]): The list of environments returned by a ListEnvironmentsRequest. next_page_token (str): @@ -129,12 +131,12 @@ class ListEnvironmentsResponse(proto.Message): def raw_page(self): return self - environments = proto.RepeatedField( + environments: MutableSequence["Environment"] = proto.RepeatedField( proto.MESSAGE, number=1, message="Environment", ) - next_page_token = proto.Field( + next_page_token: str = proto.Field( proto.STRING, number=2, ) @@ -149,7 +151,7 @@ class DeleteEnvironmentRequest(proto.Message): "projects/{projectId}/locations/{locationId}/environments/{environmentId}". """ - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) @@ -341,16 +343,16 @@ class UpdateEnvironmentRequest(proto.Message): components may be under maintenance. """ - name = proto.Field( + name: str = proto.Field( proto.STRING, number=2, ) - environment = proto.Field( + environment: "Environment" = proto.Field( proto.MESSAGE, number=1, message="Environment", ) - update_mask = proto.Field( + update_mask: field_mask_pb2.FieldMask = proto.Field( proto.MESSAGE, number=3, message=field_mask_pb2.FieldMask, @@ -367,7 +369,7 @@ class RestartWebServerRequest(proto.Message): "projects/{projectId}/locations/{locationId}/environments/{environmentId}". """ - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) @@ -463,68 +465,68 @@ class EnvironmentSize(proto.Enum): ENVIRONMENT_SIZE_MEDIUM = 2 ENVIRONMENT_SIZE_LARGE = 3 - gke_cluster = proto.Field( + gke_cluster: str = proto.Field( proto.STRING, number=1, ) - dag_gcs_prefix = proto.Field( + dag_gcs_prefix: str = proto.Field( proto.STRING, number=2, ) - node_count = proto.Field( + node_count: int = proto.Field( proto.INT32, number=3, ) - software_config = proto.Field( + software_config: "SoftwareConfig" = proto.Field( proto.MESSAGE, number=4, message="SoftwareConfig", ) - node_config = proto.Field( + node_config: "NodeConfig" = proto.Field( proto.MESSAGE, number=5, message="NodeConfig", ) - private_environment_config = proto.Field( + private_environment_config: "PrivateEnvironmentConfig" = proto.Field( proto.MESSAGE, number=7, message="PrivateEnvironmentConfig", ) - web_server_network_access_control = proto.Field( + web_server_network_access_control: "WebServerNetworkAccessControl" = proto.Field( proto.MESSAGE, number=9, message="WebServerNetworkAccessControl", ) - database_config = proto.Field( + database_config: "DatabaseConfig" = proto.Field( proto.MESSAGE, number=10, message="DatabaseConfig", ) - web_server_config = proto.Field( + web_server_config: "WebServerConfig" = proto.Field( proto.MESSAGE, number=11, message="WebServerConfig", ) - airflow_uri = proto.Field( + airflow_uri: str = proto.Field( proto.STRING, number=6, ) - encryption_config = proto.Field( + encryption_config: "EncryptionConfig" = proto.Field( proto.MESSAGE, number=12, message="EncryptionConfig", ) - maintenance_window = proto.Field( + maintenance_window: "MaintenanceWindow" = proto.Field( proto.MESSAGE, number=13, message="MaintenanceWindow", ) - workloads_config = proto.Field( + workloads_config: "WorkloadsConfig" = proto.Field( proto.MESSAGE, number=15, message="WorkloadsConfig", ) - environment_size = proto.Field( + environment_size: EnvironmentSize = proto.Field( proto.ENUM, number=16, enum=EnvironmentSize, @@ -536,7 +538,7 @@ class 
WebServerNetworkAccessControl(proto.Message): server. Attributes: - allowed_ip_ranges (Sequence[google.cloud.orchestration.airflow.service_v1beta1.types.WebServerNetworkAccessControl.AllowedIpRange]): + allowed_ip_ranges (MutableSequence[google.cloud.orchestration.airflow.service_v1beta1.types.WebServerNetworkAccessControl.AllowedIpRange]): A collection of allowed IP ranges with descriptions. """ @@ -560,16 +562,16 @@ class AllowedIpRange(proto.Message): contain at most 300 characters. """ - value = proto.Field( + value: str = proto.Field( proto.STRING, number=1, ) - description = proto.Field( + description: str = proto.Field( proto.STRING, number=2, ) - allowed_ip_ranges = proto.RepeatedField( + allowed_ip_ranges: MutableSequence[AllowedIpRange] = proto.RepeatedField( proto.MESSAGE, number=1, message=AllowedIpRange, @@ -605,7 +607,7 @@ class SoftwareConfig(proto.Message): See also `Version List `__. - airflow_config_overrides (Mapping[str, str]): + airflow_config_overrides (MutableMapping[str, str]): Optional. Apache Airflow configuration properties to override. @@ -625,7 +627,7 @@ class SoftwareConfig(proto.Message): Certain Apache Airflow configuration property values are `blocked `__, and cannot be overridden. - pypi_packages (Mapping[str, str]): + pypi_packages (MutableMapping[str, str]): Optional. Custom Python Package Index (PyPI) packages to be installed in the environment. @@ -634,7 +636,7 @@ class SoftwareConfig(proto.Message): as "==1.12.0", "[devel,gcp_api]", or "[devel]>=1.8.2, <1.9.2". To specify a package without pinning it to a version specifier, use the empty string as the value. - env_variables (Mapping[str, str]): + env_variables (MutableMapping[str, str]): Optional. Additional environment variables to provide to the Apache Airflow scheduler, worker, and webserver processes. @@ -665,26 +667,26 @@ class SoftwareConfig(proto.Message): default is '3'. Cannot be updated. """ - image_version = proto.Field( + image_version: str = proto.Field( proto.STRING, number=1, ) - airflow_config_overrides = proto.MapField( + airflow_config_overrides: MutableMapping[str, str] = proto.MapField( proto.STRING, proto.STRING, number=2, ) - pypi_packages = proto.MapField( + pypi_packages: MutableMapping[str, str] = proto.MapField( proto.STRING, proto.STRING, number=3, ) - env_variables = proto.MapField( + env_variables: MutableMapping[str, str] = proto.MapField( proto.STRING, proto.STRING, number=4, ) - python_version = proto.Field( + python_version: str = proto.Field( proto.STRING, number=6, ) @@ -756,23 +758,23 @@ class IPAllocationPolicy(proto.Message): ``services_ipv4_cidr_block`` but not both. """ - use_ip_aliases = proto.Field( + use_ip_aliases: bool = proto.Field( proto.BOOL, number=1, ) - cluster_secondary_range_name = proto.Field( + cluster_secondary_range_name: str = proto.Field( proto.STRING, number=2, ) - services_secondary_range_name = proto.Field( + services_secondary_range_name: str = proto.Field( proto.STRING, number=3, ) - cluster_ipv4_cidr_block = proto.Field( + cluster_ipv4_cidr_block: str = proto.Field( proto.STRING, number=4, ) - services_ipv4_cidr_block = proto.Field( + services_ipv4_cidr_block: str = proto.Field( proto.STRING, number=5, ) @@ -852,7 +854,7 @@ class NodeConfig(proto.Message): Optional. The disk size in GB used for node VMs. Minimum size is 20GB. If unspecified, defaults to 100GB. Cannot be updated. - oauth_scopes (Sequence[str]): + oauth_scopes (MutableSequence[str]): Optional. The set of Google API scopes to be made available on all node VMs. 
If ``oauth_scopes`` is empty, defaults to ["https://www.googleapis.com/auth/cloud-platform"]. Cannot @@ -863,7 +865,7 @@ class NodeConfig(proto.Message): service account is not specified, the "default" Compute Engine service account is used. Cannot be updated. - tags (Sequence[str]): + tags (MutableSequence[str]): Optional. The list of instance tags applied to all node VMs. Tags are used to identify valid sources or targets for network firewalls. Each tag within the list must comply with @@ -886,44 +888,44 @@ class NodeConfig(proto.Message): Cannot be updated. """ - location = proto.Field( + location: str = proto.Field( proto.STRING, number=1, ) - machine_type = proto.Field( + machine_type: str = proto.Field( proto.STRING, number=2, ) - network = proto.Field( + network: str = proto.Field( proto.STRING, number=3, ) - subnetwork = proto.Field( + subnetwork: str = proto.Field( proto.STRING, number=4, ) - disk_size_gb = proto.Field( + disk_size_gb: int = proto.Field( proto.INT32, number=5, ) - oauth_scopes = proto.RepeatedField( + oauth_scopes: MutableSequence[str] = proto.RepeatedField( proto.STRING, number=6, ) - service_account = proto.Field( + service_account: str = proto.Field( proto.STRING, number=7, ) - tags = proto.RepeatedField( + tags: MutableSequence[str] = proto.RepeatedField( proto.STRING, number=8, ) - ip_allocation_policy = proto.Field( + ip_allocation_policy: "IPAllocationPolicy" = proto.Field( proto.MESSAGE, number=9, message="IPAllocationPolicy", ) - max_pods_per_node = proto.Field( + max_pods_per_node: int = proto.Field( proto.INT32, number=10, ) @@ -952,15 +954,15 @@ class PrivateClusterConfig(proto.Message): within the cluster's network. """ - enable_private_endpoint = proto.Field( + enable_private_endpoint: bool = proto.Field( proto.BOOL, number=1, ) - master_ipv4_cidr_block = proto.Field( + master_ipv4_cidr_block: str = proto.Field( proto.STRING, number=2, ) - master_ipv4_reserved_range = proto.Field( + master_ipv4_reserved_range: str = proto.Field( proto.STRING, number=3, ) @@ -1008,32 +1010,32 @@ class PrivateEnvironmentConfig(proto.Message): versions composer-2.\ *.*-airflow-*.*.\* and newer. """ - enable_private_environment = proto.Field( + enable_private_environment: bool = proto.Field( proto.BOOL, number=1, ) - private_cluster_config = proto.Field( + private_cluster_config: "PrivateClusterConfig" = proto.Field( proto.MESSAGE, number=2, message="PrivateClusterConfig", ) - web_server_ipv4_cidr_block = proto.Field( + web_server_ipv4_cidr_block: str = proto.Field( proto.STRING, number=3, ) - cloud_sql_ipv4_cidr_block = proto.Field( + cloud_sql_ipv4_cidr_block: str = proto.Field( proto.STRING, number=4, ) - web_server_ipv4_reserved_range = proto.Field( + web_server_ipv4_reserved_range: str = proto.Field( proto.STRING, number=5, ) - cloud_composer_network_ipv4_cidr_block = proto.Field( + cloud_composer_network_ipv4_cidr_block: str = proto.Field( proto.STRING, number=7, ) - cloud_composer_network_ipv4_reserved_range = proto.Field( + cloud_composer_network_ipv4_reserved_range: str = proto.Field( proto.STRING, number=8, ) @@ -1052,7 +1054,7 @@ class DatabaseConfig(proto.Message): specified, db-n1-standard-2 will be used. """ - machine_type = proto.Field( + machine_type: str = proto.Field( proto.STRING, number=1, ) @@ -1074,7 +1076,7 @@ class WebServerConfig(proto.Message): manually changed to a non-standard values. 
""" - machine_type = proto.Field( + machine_type: str = proto.Field( proto.STRING, number=1, ) @@ -1092,7 +1094,7 @@ class EncryptionConfig(proto.Message): Google-managed key will be used. """ - kms_key_name = proto.Field( + kms_key_name: str = proto.Field( proto.STRING, number=1, ) @@ -1130,17 +1132,17 @@ class MaintenanceWindow(proto.Message): ``FREQ=WEEKLY;BYDAY=TU,WE``, ``FREQ=DAILY``. """ - start_time = proto.Field( + start_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=1, message=timestamp_pb2.Timestamp, ) - end_time = proto.Field( + end_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=2, message=timestamp_pb2.Timestamp, ) - recurrence = proto.Field( + recurrence: str = proto.Field( proto.STRING, number=3, ) @@ -1179,19 +1181,19 @@ class SchedulerResource(proto.Message): Optional. The number of schedulers. """ - cpu = proto.Field( + cpu: float = proto.Field( proto.FLOAT, number=1, ) - memory_gb = proto.Field( + memory_gb: float = proto.Field( proto.FLOAT, number=2, ) - storage_gb = proto.Field( + storage_gb: float = proto.Field( proto.FLOAT, number=3, ) - count = proto.Field( + count: int = proto.Field( proto.INT32, number=4, ) @@ -1211,15 +1213,15 @@ class WebServerResource(proto.Message): Airflow web server. """ - cpu = proto.Field( + cpu: float = proto.Field( proto.FLOAT, number=1, ) - memory_gb = proto.Field( + memory_gb: float = proto.Field( proto.FLOAT, number=2, ) - storage_gb = proto.Field( + storage_gb: float = proto.Field( proto.FLOAT, number=3, ) @@ -1245,38 +1247,38 @@ class WorkerResource(proto.Message): autoscaling. """ - cpu = proto.Field( + cpu: float = proto.Field( proto.FLOAT, number=1, ) - memory_gb = proto.Field( + memory_gb: float = proto.Field( proto.FLOAT, number=2, ) - storage_gb = proto.Field( + storage_gb: float = proto.Field( proto.FLOAT, number=3, ) - min_count = proto.Field( + min_count: int = proto.Field( proto.INT32, number=4, ) - max_count = proto.Field( + max_count: int = proto.Field( proto.INT32, number=5, ) - scheduler = proto.Field( + scheduler: SchedulerResource = proto.Field( proto.MESSAGE, number=1, message=SchedulerResource, ) - web_server = proto.Field( + web_server: WebServerResource = proto.Field( proto.MESSAGE, number=2, message=WebServerResource, ) - worker = proto.Field( + worker: WorkerResource = proto.Field( proto.MESSAGE, number=3, message=WorkerResource, @@ -1310,7 +1312,7 @@ class Environment(proto.Message): update_time (google.protobuf.timestamp_pb2.Timestamp): Output only. The time at which this environment was last modified. - labels (Mapping[str, str]): + labels (MutableMapping[str, str]): Optional. User-defined labels for this environment. The labels map can contain no more than 64 entries. 
Entries of the labels map are UTF8 strings that comply with the @@ -1333,35 +1335,35 @@ class State(proto.Enum): DELETING = 4 ERROR = 5 - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) - config = proto.Field( + config: "EnvironmentConfig" = proto.Field( proto.MESSAGE, number=2, message="EnvironmentConfig", ) - uuid = proto.Field( + uuid: str = proto.Field( proto.STRING, number=3, ) - state = proto.Field( + state: State = proto.Field( proto.ENUM, number=4, enum=State, ) - create_time = proto.Field( + create_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=5, message=timestamp_pb2.Timestamp, ) - update_time = proto.Field( + update_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=6, message=timestamp_pb2.Timestamp, ) - labels = proto.MapField( + labels: MutableMapping[str, str] = proto.MapField( proto.STRING, proto.STRING, number=7, @@ -1402,11 +1404,11 @@ class CheckUpgradeRequest(proto.Message): (/composer/docs/concepts/versioning/composer-versions). """ - environment = proto.Field( + environment: str = proto.Field( proto.STRING, number=1, ) - image_version = proto.Field( + image_version: str = proto.Field( proto.STRING, number=2, ) @@ -1430,7 +1432,7 @@ class CheckUpgradeResponse(proto.Message): image_version (str): Composer image for which the build was happening. - pypi_dependencies (Mapping[str, str]): + pypi_dependencies (MutableMapping[str, str]): Pypi dependencies specified in the environment configuration, at the time when the build was triggered. @@ -1444,24 +1446,24 @@ class ConflictResult(proto.Enum): CONFLICT = 1 NO_CONFLICT = 2 - build_log_uri = proto.Field( + build_log_uri: str = proto.Field( proto.STRING, number=1, ) - contains_pypi_modules_conflict = proto.Field( + contains_pypi_modules_conflict: ConflictResult = proto.Field( proto.ENUM, number=4, enum=ConflictResult, ) - pypi_conflict_build_log_extract = proto.Field( + pypi_conflict_build_log_extract: str = proto.Field( proto.STRING, number=3, ) - image_version = proto.Field( + image_version: str = proto.Field( proto.STRING, number=5, ) - pypi_dependencies = proto.MapField( + pypi_dependencies: MutableMapping[str, str] = proto.MapField( proto.STRING, proto.STRING, number=6, diff --git a/google/cloud/orchestration/airflow/service_v1beta1/types/image_versions.py b/google/cloud/orchestration/airflow/service_v1beta1/types/image_versions.py index ba2e513..c23f704 100644 --- a/google/cloud/orchestration/airflow/service_v1beta1/types/image_versions.py +++ b/google/cloud/orchestration/airflow/service_v1beta1/types/image_versions.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from typing import MutableMapping, MutableSequence + from google.type import date_pb2 # type: ignore import proto # type: ignore @@ -44,19 +46,19 @@ class ListImageVersionsRequest(proto.Message): releases should be included. """ - parent = proto.Field( + parent: str = proto.Field( proto.STRING, number=1, ) - page_size = proto.Field( + page_size: int = proto.Field( proto.INT32, number=2, ) - page_token = proto.Field( + page_token: str = proto.Field( proto.STRING, number=3, ) - include_past_releases = proto.Field( + include_past_releases: bool = proto.Field( proto.BOOL, number=4, ) @@ -66,7 +68,7 @@ class ListImageVersionsResponse(proto.Message): r"""The ImageVersions in a project and location. 
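Taken together, these types modules now follow one annotation idiom: scalar fields are annotated with their Python type, message fields with the message name, repeated fields with MutableSequence[...], and map fields with MutableMapping[...], while the proto.Field arguments continue to define the wire format. A condensed sketch of the idiom, using an invented message rather than one of the library's types:

.. code-block:: python

    from typing import MutableMapping, MutableSequence

    import proto  # proto-plus

    class ExampleEnvironment(proto.Message):
        """Invented message showing the annotated proto-plus field style."""

        name: str = proto.Field(
            proto.STRING,
            number=1,
        )
        node_count: int = proto.Field(
            proto.INT32,
            number=2,
        )
        labels: MutableMapping[str, str] = proto.MapField(
            proto.STRING,
            proto.STRING,
            number=3,
        )
        tags: MutableSequence[str] = proto.RepeatedField(
            proto.STRING,
            number=4,
        )

The annotations are informational only; proto-plus still derives the descriptor from the proto.Field arguments, so runtime behaviour of the messages is unchanged.
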
Attributes: - image_versions (Sequence[google.cloud.orchestration.airflow.service_v1beta1.types.ImageVersion]): + image_versions (MutableSequence[google.cloud.orchestration.airflow.service_v1beta1.types.ImageVersion]): The list of supported ImageVersions in a location. next_page_token (str): @@ -78,12 +80,12 @@ class ListImageVersionsResponse(proto.Message): def raw_page(self): return self - image_versions = proto.RepeatedField( + image_versions: MutableSequence["ImageVersion"] = proto.RepeatedField( proto.MESSAGE, number=1, message="ImageVersion", ) - next_page_token = proto.Field( + next_page_token: str = proto.Field( proto.STRING, number=2, ) @@ -100,7 +102,7 @@ class ImageVersion(proto.Message): Whether this is the default ImageVersion used by Composer during environment creation if no input ImageVersion is specified. - supported_python_versions (Sequence[str]): + supported_python_versions (MutableSequence[str]): supported python versions release_date (google.type.date_pb2.Date): The date of the version release. @@ -112,28 +114,28 @@ class ImageVersion(proto.Message): environment running with the image version. """ - image_version_id = proto.Field( + image_version_id: str = proto.Field( proto.STRING, number=1, ) - is_default = proto.Field( + is_default: bool = proto.Field( proto.BOOL, number=2, ) - supported_python_versions = proto.RepeatedField( + supported_python_versions: MutableSequence[str] = proto.RepeatedField( proto.STRING, number=3, ) - release_date = proto.Field( + release_date: date_pb2.Date = proto.Field( proto.MESSAGE, number=4, message=date_pb2.Date, ) - creation_disabled = proto.Field( + creation_disabled: bool = proto.Field( proto.BOOL, number=5, ) - upgrade_disabled = proto.Field( + upgrade_disabled: bool = proto.Field( proto.BOOL, number=6, ) diff --git a/google/cloud/orchestration/airflow/service_v1beta1/types/operations.py b/google/cloud/orchestration/airflow/service_v1beta1/types/operations.py index b9b7637..2f1bd5c 100644 --- a/google/cloud/orchestration/airflow/service_v1beta1/types/operations.py +++ b/google/cloud/orchestration/airflow/service_v1beta1/types/operations.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from typing import MutableMapping, MutableSequence + from google.protobuf import timestamp_pb2 # type: ignore import proto # type: ignore @@ -66,30 +68,30 @@ class Type(proto.Enum): UPDATE = 3 CHECK = 4 - state = proto.Field( + state: State = proto.Field( proto.ENUM, number=1, enum=State, ) - operation_type = proto.Field( + operation_type: Type = proto.Field( proto.ENUM, number=2, enum=Type, ) - resource = proto.Field( + resource: str = proto.Field( proto.STRING, number=3, ) - resource_uuid = proto.Field( + resource_uuid: str = proto.Field( proto.STRING, number=4, ) - create_time = proto.Field( + create_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=5, message=timestamp_pb2.Timestamp, ) - end_time = proto.Field( + end_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=6, message=timestamp_pb2.Timestamp, diff --git a/owlbot.py b/owlbot.py new file mode 100644 index 0000000..ce738f0 --- /dev/null +++ b/owlbot.py @@ -0,0 +1,56 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import json +from pathlib import Path +import shutil + +import synthtool as s +import synthtool.gcp as gcp +from synthtool.languages import python + +# ---------------------------------------------------------------------------- +# Copy the generated client from the owl-bot staging directory +# ---------------------------------------------------------------------------- + +clean_up_generated_samples = True + +# Load the default version defined in .repo-metadata.json. +default_version = json.load(open(".repo-metadata.json", "rt")).get( + "default_version" +) + +for library in s.get_staging_dirs(default_version): + if clean_up_generated_samples: + shutil.rmtree("samples/generated_samples", ignore_errors=True) + clean_up_generated_samples = False + s.move([library], excludes=["**/gapic_version.py"]) +s.remove_staging_dirs() + +# ---------------------------------------------------------------------------- +# Add templated files +# ---------------------------------------------------------------------------- + +templated_files = gcp.CommonTemplates().py_library( + cov_level=100, + microgenerator=True, + versions=gcp.common.detect_versions(path="./google", default_first=True), +) +s.move(templated_files, excludes=[".coveragerc", ".github/release-please.yml"]) + +python.py_samples(skip_readmes=True) + +# run format session for all directories which have a noxfile +for noxfile in Path(".").glob("**/noxfile.py"): + s.shell.run(["nox", "-s", "format"], cwd=noxfile.parent, hide_output=False) diff --git a/release-please-config.json b/release-please-config.json new file mode 100644 index 0000000..45c8680 --- /dev/null +++ b/release-please-config.json @@ -0,0 +1,28 @@ +{ + "$schema": "https://raw.githubusercontent.com/googleapis/release-please/main/schemas/config.json", + "packages": { + ".": { + "release-type": "python", + "extra-files": [ + "google/cloud/orchestration/airflow/service/gapic_version.py", + { + "type": "json", + "path": "samples/generated_samples/snippet_metadata_google.cloud.orchestration.airflow.service.v1.json", + "jsonpath": "$.clientLibrary.version" + }, + { + "type": "json", + "path": "samples/generated_samples/snippet_metadata_google.cloud.orchestration.airflow.service.v1beta1.json", + "jsonpath": "$.clientLibrary.version" + } + ] + } + }, + "release-type": "python", + "plugins": [ + { + "type": "sentence-case" + } + ], + "initial-version": "0.1.0" +} diff --git a/samples/generated_samples/snippet_metadata_service_v1.json b/samples/generated_samples/snippet_metadata_google.cloud.orchestration.airflow.service.v1.json similarity index 99% rename from samples/generated_samples/snippet_metadata_service_v1.json rename to samples/generated_samples/snippet_metadata_google.cloud.orchestration.airflow.service.v1.json index 82de22e..5ebdced 100644 --- a/samples/generated_samples/snippet_metadata_service_v1.json +++ b/samples/generated_samples/snippet_metadata_google.cloud.orchestration.airflow.service.v1.json @@ -7,7 +7,8 @@ } ], "language": "PYTHON", - "name": "google-cloud-orchestration-airflow" + "name": "google-cloud-orchestration-airflow", + "version": "0.1.0" }, "snippets": 
[ { diff --git a/samples/generated_samples/snippet_metadata_service_v1beta1.json b/samples/generated_samples/snippet_metadata_google.cloud.orchestration.airflow.service.v1beta1.json similarity index 99% rename from samples/generated_samples/snippet_metadata_service_v1beta1.json rename to samples/generated_samples/snippet_metadata_google.cloud.orchestration.airflow.service.v1beta1.json index af411d0..b13cbb3 100644 --- a/samples/generated_samples/snippet_metadata_service_v1beta1.json +++ b/samples/generated_samples/snippet_metadata_google.cloud.orchestration.airflow.service.v1beta1.json @@ -7,7 +7,8 @@ } ], "language": "PYTHON", - "name": "google-cloud-orchestration-airflow-service" + "name": "google-cloud-orchestration-airflow-service", + "version": "0.1.0" }, "snippets": [ { diff --git a/setup.py b/setup.py index e7764e6..577f135 100644 --- a/setup.py +++ b/setup.py @@ -1,34 +1,50 @@ # -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# https://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. - +# import io import os -import setuptools +import setuptools # type: ignore + +package_root = os.path.abspath(os.path.dirname(__file__)) name = "google-cloud-orchestration-airflow" -description = "Cloud Composer API client library" -version = "1.4.4" -release_status = "Development Status :: 5 - Production/Stable" -url = "https://github.com/googleapis/python-orchestration-airflow" + + +description = "Google Cloud Orchestration Airflow API client library" + +version = {} +with open( + os.path.join( + package_root, "google/cloud/orchestration/airflow/service/gapic_version.py" + ) +) as fp: + exec(fp.read(), version) +version = version["__version__"] + +if version[0] == "0": + release_status = "Development Status :: 4 - Beta" +else: + release_status = "Development Status :: 5 - Production/Stable" + dependencies = [ - "google-api-core[grpc] >= 1.32.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*", + "google-api-core[grpc] >= 1.33.2, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*", "proto-plus >= 1.22.0, <2.0.0dev", "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", ] +url = "https://github.com/googleapis/python-orchestration-airflow" package_root = os.path.abspath(os.path.dirname(__file__)) @@ -46,7 +62,6 @@ if "google.cloud" in packages: namespaces.append("google.cloud") - setuptools.setup( name=name, version=version, diff --git a/testing/constraints-3.10.txt b/testing/constraints-3.10.txt index e69de29..ed7f9ae 100644 --- a/testing/constraints-3.10.txt +++ b/testing/constraints-3.10.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. 
+google-api-core +proto-plus +protobuf diff --git a/testing/constraints-3.11.txt b/testing/constraints-3.11.txt index e69de29..ed7f9ae 100644 --- a/testing/constraints-3.11.txt +++ b/testing/constraints-3.11.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/testing/constraints-3.7.txt b/testing/constraints-3.7.txt index 4005dc5..6f3158c 100644 --- a/testing/constraints-3.7.txt +++ b/testing/constraints-3.7.txt @@ -4,6 +4,6 @@ # Pin the version to the lower bound. # e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", # Then this file should have google-cloud-foo==1.14.0 -google-api-core==1.32.0 +google-api-core==1.33.2 proto-plus==1.22.0 protobuf==3.19.5 diff --git a/testing/constraints-3.8.txt b/testing/constraints-3.8.txt index e69de29..ed7f9ae 100644 --- a/testing/constraints-3.8.txt +++ b/testing/constraints-3.8.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/testing/constraints-3.9.txt b/testing/constraints-3.9.txt index e69de29..ed7f9ae 100644 --- a/testing/constraints-3.9.txt +++ b/testing/constraints-3.9.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/tests/unit/gapic/service_v1/test_environments.py b/tests/unit/gapic/service_v1/test_environments.py index e1f4a7a..df22a3a 100644 --- a/tests/unit/gapic/service_v1/test_environments.py +++ b/tests/unit/gapic/service_v1/test_environments.py @@ -41,6 +41,7 @@ from google.auth.exceptions import MutualTLSChannelError from google.longrunning import operations_pb2 from google.oauth2 import service_account +from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore import grpc diff --git a/tests/unit/gapic/service_v1beta1/test_environments.py b/tests/unit/gapic/service_v1beta1/test_environments.py index e72eb74..b878086 100644 --- a/tests/unit/gapic/service_v1beta1/test_environments.py +++ b/tests/unit/gapic/service_v1beta1/test_environments.py @@ -41,6 +41,7 @@ from google.auth.exceptions import MutualTLSChannelError from google.longrunning import operations_pb2 from google.oauth2 import service_account +from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore import grpc From 6c8a0bf722793353ca9311410f245451bbdf437c Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 2 Dec 2022 15:28:13 +0000 Subject: [PATCH 4/7] feat: added LoadSnapshot, SaveSnapshot RPCs (#150) - [ ] Regenerate this pull request now. 
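A minimal usage sketch of the two new RPCs, modeled on the generated save_snapshot and load_snapshot samples that this commit adds; the resource name below is a placeholder, the calls require valid credentials, and real requests may need fields beyond ``environment`` (for example a snapshot location), so treat this as a template only:

.. code-block:: python

    from google.cloud.orchestration.airflow import service_v1

    def snapshot_round_trip() -> None:
        client = service_v1.EnvironmentsClient()
        env = "projects/my-project/locations/us-central1/environments/my-environment"

        # Save the environment's state as a snapshot (long-running operation).
        save_operation = client.save_snapshot(
            request=service_v1.SaveSnapshotRequest(environment=env)
        )
        print(save_operation.result())

        # Load a previously saved snapshot back into the environment.
        load_operation = client.load_snapshot(
            request=service_v1.LoadSnapshotRequest(environment=env)
        )
        print(load_operation.result())
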
feat: added fields maintenance_window, workloads_config, environment_size, master_authorized_networks_config, recovery_config to EnvironmentConfig feat: added field scheduler_count to SoftwareConfig feat: added field enable_ip_masq_agent to NodeConfig feat: added fields cloud_composer_network_ipv4_cidr_block, cloud_composer_network_ipv4_reserved_range, enable_privately_used_public_ips, cloud_composer_connection_subnetwork, networking_config to PrivateEnvironmentConfig PiperOrigin-RevId: 492193414 Source-Link: https://togithub.com/googleapis/googleapis/commit/34eb8f1662a226c9cc6640066111ca2d3014f30d Source-Link: https://togithub.com/googleapis/googleapis-gen/commit/ac6572620255fbf06316b68019eec52b32c4cc8a Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiYWM2NTcyNjIwMjU1ZmJmMDYzMTZiNjgwMTllZWM1MmIzMmM0Y2M4YSJ9 --- .../orchestration/airflow/service/__init__.py | 20 + .../airflow/service_v1/__init__.py | 20 + .../airflow/service_v1/gapic_metadata.json | 20 + .../services/environments/async_client.py | 242 +++++- .../services/environments/client.py | 244 +++++- .../services/environments/transports/base.py | 28 + .../services/environments/transports/grpc.py | 58 ++ .../environments/transports/grpc_asyncio.py | 62 ++ .../airflow/service_v1/types/__init__.py | 20 + .../airflow/service_v1/types/environments.py | 753 ++++++++++++++++-- .../service_v1/types/image_versions.py | 2 +- .../airflow/service_v1/types/operations.py | 2 + .../airflow/service_v1beta1/__init__.py | 22 + .../service_v1beta1/gapic_metadata.json | 20 + .../services/environments/async_client.py | 261 +++++- .../services/environments/client.py | 263 +++++- .../services/environments/transports/base.py | 28 + .../services/environments/transports/grpc.py | 58 ++ .../environments/transports/grpc_asyncio.py | 62 ++ .../airflow/service_v1beta1/types/__init__.py | 22 + .../service_v1beta1/types/environments.py | 722 +++++++++++++++-- .../service_v1beta1/types/image_versions.py | 2 +- .../service_v1beta1/types/operations.py | 2 + ...erated_environments_load_snapshot_async.py | 55 ++ ...nerated_environments_load_snapshot_sync.py | 55 ++ ...erated_environments_save_snapshot_async.py | 55 ++ ...nerated_environments_save_snapshot_sync.py | 55 ++ ...erated_environments_load_snapshot_async.py | 55 ++ ...nerated_environments_load_snapshot_sync.py | 55 ++ ...erated_environments_save_snapshot_async.py | 55 ++ ...nerated_environments_save_snapshot_sync.py | 55 ++ ...loud.orchestration.airflow.service.v1.json | 306 +++++++ ...orchestration.airflow.service.v1beta1.json | 306 +++++++ scripts/fixup_service_v1_keywords.py | 2 + scripts/fixup_service_v1beta1_keywords.py | 2 + .../gapic/service_v1/test_environments.py | 290 +++++++ .../service_v1beta1/test_environments.py | 290 +++++++ 37 files changed, 4370 insertions(+), 199 deletions(-) create mode 100644 samples/generated_samples/composer_v1_generated_environments_load_snapshot_async.py create mode 100644 samples/generated_samples/composer_v1_generated_environments_load_snapshot_sync.py create mode 100644 samples/generated_samples/composer_v1_generated_environments_save_snapshot_async.py create mode 100644 samples/generated_samples/composer_v1_generated_environments_save_snapshot_sync.py create mode 100644 samples/generated_samples/composer_v1beta1_generated_environments_load_snapshot_async.py create mode 100644 samples/generated_samples/composer_v1beta1_generated_environments_load_snapshot_sync.py create mode 100644 
samples/generated_samples/composer_v1beta1_generated_environments_save_snapshot_async.py create mode 100644 samples/generated_samples/composer_v1beta1_generated_environments_save_snapshot_sync.py diff --git a/google/cloud/orchestration/airflow/service/__init__.py b/google/cloud/orchestration/airflow/service/__init__.py index 1d4216c..660dedc 100644 --- a/google/cloud/orchestration/airflow/service/__init__.py +++ b/google/cloud/orchestration/airflow/service/__init__.py @@ -42,13 +42,23 @@ IPAllocationPolicy, ListEnvironmentsRequest, ListEnvironmentsResponse, + LoadSnapshotRequest, + LoadSnapshotResponse, + MaintenanceWindow, + MasterAuthorizedNetworksConfig, + NetworkingConfig, NodeConfig, PrivateClusterConfig, PrivateEnvironmentConfig, + RecoveryConfig, + SaveSnapshotRequest, + SaveSnapshotResponse, + ScheduledSnapshotsConfig, SoftwareConfig, UpdateEnvironmentRequest, WebServerConfig, WebServerNetworkAccessControl, + WorkloadsConfig, ) from google.cloud.orchestration.airflow.service_v1.types.image_versions import ( ImageVersion, @@ -75,13 +85,23 @@ "IPAllocationPolicy", "ListEnvironmentsRequest", "ListEnvironmentsResponse", + "LoadSnapshotRequest", + "LoadSnapshotResponse", + "MaintenanceWindow", + "MasterAuthorizedNetworksConfig", + "NetworkingConfig", "NodeConfig", "PrivateClusterConfig", "PrivateEnvironmentConfig", + "RecoveryConfig", + "SaveSnapshotRequest", + "SaveSnapshotResponse", + "ScheduledSnapshotsConfig", "SoftwareConfig", "UpdateEnvironmentRequest", "WebServerConfig", "WebServerNetworkAccessControl", + "WorkloadsConfig", "ImageVersion", "ListImageVersionsRequest", "ListImageVersionsResponse", diff --git a/google/cloud/orchestration/airflow/service_v1/__init__.py b/google/cloud/orchestration/airflow/service_v1/__init__.py index c8b3ab7..305b555 100644 --- a/google/cloud/orchestration/airflow/service_v1/__init__.py +++ b/google/cloud/orchestration/airflow/service_v1/__init__.py @@ -32,13 +32,23 @@ IPAllocationPolicy, ListEnvironmentsRequest, ListEnvironmentsResponse, + LoadSnapshotRequest, + LoadSnapshotResponse, + MaintenanceWindow, + MasterAuthorizedNetworksConfig, + NetworkingConfig, NodeConfig, PrivateClusterConfig, PrivateEnvironmentConfig, + RecoveryConfig, + SaveSnapshotRequest, + SaveSnapshotResponse, + ScheduledSnapshotsConfig, SoftwareConfig, UpdateEnvironmentRequest, WebServerConfig, WebServerNetworkAccessControl, + WorkloadsConfig, ) from .types.image_versions import ( ImageVersion, @@ -66,12 +76,22 @@ "ListEnvironmentsResponse", "ListImageVersionsRequest", "ListImageVersionsResponse", + "LoadSnapshotRequest", + "LoadSnapshotResponse", + "MaintenanceWindow", + "MasterAuthorizedNetworksConfig", + "NetworkingConfig", "NodeConfig", "OperationMetadata", "PrivateClusterConfig", "PrivateEnvironmentConfig", + "RecoveryConfig", + "SaveSnapshotRequest", + "SaveSnapshotResponse", + "ScheduledSnapshotsConfig", "SoftwareConfig", "UpdateEnvironmentRequest", "WebServerConfig", "WebServerNetworkAccessControl", + "WorkloadsConfig", ) diff --git a/google/cloud/orchestration/airflow/service_v1/gapic_metadata.json b/google/cloud/orchestration/airflow/service_v1/gapic_metadata.json index a98b49c..1c4eaec 100644 --- a/google/cloud/orchestration/airflow/service_v1/gapic_metadata.json +++ b/google/cloud/orchestration/airflow/service_v1/gapic_metadata.json @@ -30,6 +30,16 @@ "list_environments" ] }, + "LoadSnapshot": { + "methods": [ + "load_snapshot" + ] + }, + "SaveSnapshot": { + "methods": [ + "save_snapshot" + ] + }, "UpdateEnvironment": { "methods": [ "update_environment" @@ -60,6 
+70,16 @@ "list_environments" ] }, + "LoadSnapshot": { + "methods": [ + "load_snapshot" + ] + }, + "SaveSnapshot": { + "methods": [ + "save_snapshot" + ] + }, "UpdateEnvironment": { "methods": [ "update_environment" diff --git a/google/cloud/orchestration/airflow/service_v1/services/environments/async_client.py b/google/cloud/orchestration/airflow/service_v1/services/environments/async_client.py index 22972a4..38b8a2e 100644 --- a/google/cloud/orchestration/airflow/service_v1/services/environments/async_client.py +++ b/google/cloud/orchestration/airflow/service_v1/services/environments/async_client.py @@ -731,23 +731,14 @@ async def sample_update_environment(): - Horizontally scale the number of nodes in the environment. An integer greater than or equal to 3 must be provided in the ``config.nodeCount`` - field. + field. Supported for Cloud Composer environments + in versions composer-1.\ *.*-airflow-*.*.*. - ``config.webServerNetworkAccessControl`` - Replace the environment's current ``WebServerNetworkAccessControl``. - - ``config.databaseConfig`` - - - Replace the environment's current - ``DatabaseConfig``. - - - ``config.webServerConfig`` - - - Replace the environment's current - ``WebServerConfig``. - - ``config.softwareConfig.airflowConfigOverrides`` - Replace all Apache Airflow config overrides. If a @@ -775,9 +766,34 @@ async def sample_update_environment(): - Replace all environment variables. If a replacement environment variable map is not included in ``environment``, all custom - environment variables are cleared. It is an error - to provide both this mask and a mask specifying - one or more individual environment variables. + environment variables are cleared. + + - ``config.softwareConfig.schedulerCount`` + + - Horizontally scale the number of schedulers in + Airflow. A positive integer not greater than the + number of nodes must be provided in the + ``config.softwareConfig.schedulerCount`` field. + Supported for Cloud Composer environments in + versions composer-1.\ *.*-airflow-2.*.*. + + - ``config.databaseConfig.machineType`` + + - Cloud SQL machine type used by Airflow database. + It has to be one of: db-n1-standard-2, + db-n1-standard-4, db-n1-standard-8 or + db-n1-standard-16. Supported for Cloud Composer + environments in versions + composer-1.\ *.*-airflow-*.*.*. + + - ``config.webServerConfig.machineType`` + + - Machine type on which Airflow web server is + running. It has to be one of: + composer-n1-webserver-2, composer-n1-webserver-4 + or composer-n1-webserver-8. Supported for Cloud + Composer environments in versions + composer-1.\ *.*-airflow-*.*.*. This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this @@ -974,6 +990,204 @@ async def sample_delete_environment(): # Done; return the response. return response + async def save_snapshot( + self, + request: Optional[Union[environments.SaveSnapshotRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a snapshots of a Cloud Composer environment. + As a result of this operation, snapshot of environment's + state is stored in a location specified in the + SaveSnapshotRequest. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.orchestration.airflow import service_v1 + + async def sample_save_snapshot(): + # Create a client + client = service_v1.EnvironmentsAsyncClient() + + # Initialize request argument(s) + request = service_v1.SaveSnapshotRequest( + ) + + # Make the request + operation = client.save_snapshot(request=request) + + print("Waiting for operation to complete...") + + response = await operation.result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.orchestration.airflow.service_v1.types.SaveSnapshotRequest, dict]]): + The request object. Request to create a snapshot of a + Cloud Composer environment. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.orchestration.airflow.service_v1.types.SaveSnapshotResponse` + Response to SaveSnapshotRequest. + + """ + # Create or coerce a protobuf request object. + request = environments.SaveSnapshotRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.save_snapshot, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("environment", request.environment),) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + environments.SaveSnapshotResponse, + metadata_type=operations.OperationMetadata, + ) + + # Done; return the response. + return response + + async def load_snapshot( + self, + request: Optional[Union[environments.LoadSnapshotRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Loads a snapshot of a Cloud Composer environment. + As a result of this operation, a snapshot of + environment's specified in LoadSnapshotRequest is loaded + into the environment. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.orchestration.airflow import service_v1 + + async def sample_load_snapshot(): + # Create a client + client = service_v1.EnvironmentsAsyncClient() + + # Initialize request argument(s) + request = service_v1.LoadSnapshotRequest( + ) + + # Make the request + operation = client.load_snapshot(request=request) + + print("Waiting for operation to complete...") + + response = await operation.result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.orchestration.airflow.service_v1.types.LoadSnapshotRequest, dict]]): + The request object. Request to load a snapshot into a + Cloud Composer environment. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.orchestration.airflow.service_v1.types.LoadSnapshotResponse` + Response to LoadSnapshotRequest. + + """ + # Create or coerce a protobuf request object. + request = environments.LoadSnapshotRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.load_snapshot, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("environment", request.environment),) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + environments.LoadSnapshotResponse, + metadata_type=operations.OperationMetadata, + ) + + # Done; return the response. + return response + async def __aenter__(self): return self diff --git a/google/cloud/orchestration/airflow/service_v1/services/environments/client.py b/google/cloud/orchestration/airflow/service_v1/services/environments/client.py index 8dd9f4b..59cba2d 100644 --- a/google/cloud/orchestration/airflow/service_v1/services/environments/client.py +++ b/google/cloud/orchestration/airflow/service_v1/services/environments/client.py @@ -962,23 +962,14 @@ def sample_update_environment(): - Horizontally scale the number of nodes in the environment. An integer greater than or equal to 3 must be provided in the ``config.nodeCount`` - field. + field. Supported for Cloud Composer environments + in versions composer-1.\ *.*-airflow-*.*.*. - ``config.webServerNetworkAccessControl`` - Replace the environment's current ``WebServerNetworkAccessControl``. - - ``config.databaseConfig`` - - - Replace the environment's current - ``DatabaseConfig``. - - - ``config.webServerConfig`` - - - Replace the environment's current - ``WebServerConfig``. - - ``config.softwareConfig.airflowConfigOverrides`` - Replace all Apache Airflow config overrides. 
If a @@ -1006,9 +997,34 @@ def sample_update_environment(): - Replace all environment variables. If a replacement environment variable map is not included in ``environment``, all custom - environment variables are cleared. It is an error - to provide both this mask and a mask specifying - one or more individual environment variables. + environment variables are cleared. + + - ``config.softwareConfig.schedulerCount`` + + - Horizontally scale the number of schedulers in + Airflow. A positive integer not greater than the + number of nodes must be provided in the + ``config.softwareConfig.schedulerCount`` field. + Supported for Cloud Composer environments in + versions composer-1.\ *.*-airflow-2.*.*. + + - ``config.databaseConfig.machineType`` + + - Cloud SQL machine type used by Airflow database. + It has to be one of: db-n1-standard-2, + db-n1-standard-4, db-n1-standard-8 or + db-n1-standard-16. Supported for Cloud Composer + environments in versions + composer-1.\ *.*-airflow-*.*.*. + + - ``config.webServerConfig.machineType`` + + - Machine type on which Airflow web server is + running. It has to be one of: + composer-n1-webserver-2, composer-n1-webserver-4 + or composer-n1-webserver-8. Supported for Cloud + Composer environments in versions + composer-1.\ *.*-airflow-*.*.*. This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this @@ -1205,6 +1221,206 @@ def sample_delete_environment(): # Done; return the response. return response + def save_snapshot( + self, + request: Optional[Union[environments.SaveSnapshotRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates a snapshots of a Cloud Composer environment. + As a result of this operation, snapshot of environment's + state is stored in a location specified in the + SaveSnapshotRequest. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.orchestration.airflow import service_v1 + + def sample_save_snapshot(): + # Create a client + client = service_v1.EnvironmentsClient() + + # Initialize request argument(s) + request = service_v1.SaveSnapshotRequest( + ) + + # Make the request + operation = client.save_snapshot(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.orchestration.airflow.service_v1.types.SaveSnapshotRequest, dict]): + The request object. Request to create a snapshot of a + Cloud Composer environment. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.orchestration.airflow.service_v1.types.SaveSnapshotResponse` + Response to SaveSnapshotRequest. 
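For concreteness, a filled-in variant of the generated sample above, a minimal sketch assuming a hypothetical project, location, environment and Cloud Storage bucket (all identifiers are placeholders):

.. code-block:: python

    from google.cloud.orchestration.airflow import service_v1

    client = service_v1.EnvironmentsClient()

    # Placeholder resource name and bucket; both fields are described above.
    request = service_v1.SaveSnapshotRequest(
        environment="projects/my-project/locations/us-central1/environments/my-env",
        snapshot_location="gs://my-bucket/snapshots",
    )

    operation = client.save_snapshot(request=request)
    response = operation.result()  # block until the long-running operation finishes

    # SaveSnapshotResponse.snapshot_path holds the fully-resolved snapshot path.
    print(response.snapshot_path)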
+ + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a environments.SaveSnapshotRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, environments.SaveSnapshotRequest): + request = environments.SaveSnapshotRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.save_snapshot] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("environment", request.environment),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + environments.SaveSnapshotResponse, + metadata_type=operations.OperationMetadata, + ) + + # Done; return the response. + return response + + def load_snapshot( + self, + request: Optional[Union[environments.LoadSnapshotRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Loads a snapshot of a Cloud Composer environment. + As a result of this operation, a snapshot of + environment's specified in LoadSnapshotRequest is loaded + into the environment. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.orchestration.airflow import service_v1 + + def sample_load_snapshot(): + # Create a client + client = service_v1.EnvironmentsClient() + + # Initialize request argument(s) + request = service_v1.LoadSnapshotRequest( + ) + + # Make the request + operation = client.load_snapshot(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.orchestration.airflow.service_v1.types.LoadSnapshotRequest, dict]): + The request object. Request to load a snapshot into a + Cloud Composer environment. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.orchestration.airflow.service_v1.types.LoadSnapshotResponse` + Response to LoadSnapshotRequest. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a environments.LoadSnapshotRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, environments.LoadSnapshotRequest): + request = environments.LoadSnapshotRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.load_snapshot] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("environment", request.environment),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + environments.LoadSnapshotResponse, + metadata_type=operations.OperationMetadata, + ) + + # Done; return the response. + return response + def __enter__(self): return self diff --git a/google/cloud/orchestration/airflow/service_v1/services/environments/transports/base.py b/google/cloud/orchestration/airflow/service_v1/services/environments/transports/base.py index 8857232..98f65e1 100644 --- a/google/cloud/orchestration/airflow/service_v1/services/environments/transports/base.py +++ b/google/cloud/orchestration/airflow/service_v1/services/environments/transports/base.py @@ -153,6 +153,16 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.save_snapshot: gapic_v1.method.wrap_method( + self.save_snapshot, + default_timeout=None, + client_info=client_info, + ), + self.load_snapshot: gapic_v1.method.wrap_method( + self.load_snapshot, + default_timeout=None, + client_info=client_info, + ), } def close(self): @@ -217,6 +227,24 @@ def delete_environment( ]: raise NotImplementedError() + @property + def save_snapshot( + self, + ) -> Callable[ + [environments.SaveSnapshotRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def load_snapshot( + self, + ) -> Callable[ + [environments.LoadSnapshotRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + @property def kind(self) -> str: raise NotImplementedError() diff --git a/google/cloud/orchestration/airflow/service_v1/services/environments/transports/grpc.py b/google/cloud/orchestration/airflow/service_v1/services/environments/transports/grpc.py index 5c43736..bae6716 100644 --- a/google/cloud/orchestration/airflow/service_v1/services/environments/transports/grpc.py +++ b/google/cloud/orchestration/airflow/service_v1/services/environments/transports/grpc.py @@ -376,6 +376,64 @@ def delete_environment( ) return self._stubs["delete_environment"] + @property + def save_snapshot( + self, + ) -> Callable[[environments.SaveSnapshotRequest], operations_pb2.Operation]: + r"""Return a callable for the save snapshot method over gRPC. + + Creates a snapshots of a Cloud Composer environment. + As a result of this operation, snapshot of environment's + state is stored in a location specified in the + SaveSnapshotRequest. + + Returns: + Callable[[~.SaveSnapshotRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "save_snapshot" not in self._stubs: + self._stubs["save_snapshot"] = self.grpc_channel.unary_unary( + "/google.cloud.orchestration.airflow.service.v1.Environments/SaveSnapshot", + request_serializer=environments.SaveSnapshotRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["save_snapshot"] + + @property + def load_snapshot( + self, + ) -> Callable[[environments.LoadSnapshotRequest], operations_pb2.Operation]: + r"""Return a callable for the load snapshot method over gRPC. + + Loads a snapshot of a Cloud Composer environment. + As a result of this operation, a snapshot of + environment's specified in LoadSnapshotRequest is loaded + into the environment. + + Returns: + Callable[[~.LoadSnapshotRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "load_snapshot" not in self._stubs: + self._stubs["load_snapshot"] = self.grpc_channel.unary_unary( + "/google.cloud.orchestration.airflow.service.v1.Environments/LoadSnapshot", + request_serializer=environments.LoadSnapshotRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["load_snapshot"] + def close(self): self.grpc_channel.close() diff --git a/google/cloud/orchestration/airflow/service_v1/services/environments/transports/grpc_asyncio.py b/google/cloud/orchestration/airflow/service_v1/services/environments/transports/grpc_asyncio.py index d14cbb1..b27b83a 100644 --- a/google/cloud/orchestration/airflow/service_v1/services/environments/transports/grpc_asyncio.py +++ b/google/cloud/orchestration/airflow/service_v1/services/environments/transports/grpc_asyncio.py @@ -390,6 +390,68 @@ def delete_environment( ) return self._stubs["delete_environment"] + @property + def save_snapshot( + self, + ) -> Callable[ + [environments.SaveSnapshotRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the save snapshot method over gRPC. + + Creates a snapshots of a Cloud Composer environment. + As a result of this operation, snapshot of environment's + state is stored in a location specified in the + SaveSnapshotRequest. + + Returns: + Callable[[~.SaveSnapshotRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "save_snapshot" not in self._stubs: + self._stubs["save_snapshot"] = self.grpc_channel.unary_unary( + "/google.cloud.orchestration.airflow.service.v1.Environments/SaveSnapshot", + request_serializer=environments.SaveSnapshotRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["save_snapshot"] + + @property + def load_snapshot( + self, + ) -> Callable[ + [environments.LoadSnapshotRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the load snapshot method over gRPC. + + Loads a snapshot of a Cloud Composer environment. + As a result of this operation, a snapshot of + environment's specified in LoadSnapshotRequest is loaded + into the environment. 
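In practice this stub is reached through ``EnvironmentsAsyncClient``; a minimal end-to-end sketch, assuming placeholder resource names and an existing snapshot path:

.. code-block:: python

    import asyncio

    from google.cloud.orchestration.airflow import service_v1

    async def load_snapshot_example():
        client = service_v1.EnvironmentsAsyncClient()

        # Placeholder environment name and snapshot path.
        request = service_v1.LoadSnapshotRequest(
            environment="projects/my-project/locations/us-central1/environments/my-env",
            snapshot_path="gs://my-bucket/snapshots/my-snapshot",
        )

        operation = await client.load_snapshot(request=request)
        response = await operation.result()  # LoadSnapshotResponse is empty on success
        print(response)

    asyncio.run(load_snapshot_example())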
+ + Returns: + Callable[[~.LoadSnapshotRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "load_snapshot" not in self._stubs: + self._stubs["load_snapshot"] = self.grpc_channel.unary_unary( + "/google.cloud.orchestration.airflow.service.v1.Environments/LoadSnapshot", + request_serializer=environments.LoadSnapshotRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["load_snapshot"] + def close(self): return self.grpc_channel.close() diff --git a/google/cloud/orchestration/airflow/service_v1/types/__init__.py b/google/cloud/orchestration/airflow/service_v1/types/__init__.py index d1be10d..7f77a37 100644 --- a/google/cloud/orchestration/airflow/service_v1/types/__init__.py +++ b/google/cloud/orchestration/airflow/service_v1/types/__init__.py @@ -25,13 +25,23 @@ IPAllocationPolicy, ListEnvironmentsRequest, ListEnvironmentsResponse, + LoadSnapshotRequest, + LoadSnapshotResponse, + MaintenanceWindow, + MasterAuthorizedNetworksConfig, + NetworkingConfig, NodeConfig, PrivateClusterConfig, PrivateEnvironmentConfig, + RecoveryConfig, + SaveSnapshotRequest, + SaveSnapshotResponse, + ScheduledSnapshotsConfig, SoftwareConfig, UpdateEnvironmentRequest, WebServerConfig, WebServerNetworkAccessControl, + WorkloadsConfig, ) from .image_versions import ( ImageVersion, @@ -52,13 +62,23 @@ "IPAllocationPolicy", "ListEnvironmentsRequest", "ListEnvironmentsResponse", + "LoadSnapshotRequest", + "LoadSnapshotResponse", + "MaintenanceWindow", + "MasterAuthorizedNetworksConfig", + "NetworkingConfig", "NodeConfig", "PrivateClusterConfig", "PrivateEnvironmentConfig", + "RecoveryConfig", + "SaveSnapshotRequest", + "SaveSnapshotResponse", + "ScheduledSnapshotsConfig", "SoftwareConfig", "UpdateEnvironmentRequest", "WebServerConfig", "WebServerNetworkAccessControl", + "WorkloadsConfig", "ImageVersion", "ListImageVersionsRequest", "ListImageVersionsResponse", diff --git a/google/cloud/orchestration/airflow/service_v1/types/environments.py b/google/cloud/orchestration/airflow/service_v1/types/environments.py index e29369d..c1cf78f 100644 --- a/google/cloud/orchestration/airflow/service_v1/types/environments.py +++ b/google/cloud/orchestration/airflow/service_v1/types/environments.py @@ -28,16 +28,26 @@ "ListEnvironmentsResponse", "DeleteEnvironmentRequest", "UpdateEnvironmentRequest", + "SaveSnapshotRequest", + "SaveSnapshotResponse", + "LoadSnapshotRequest", + "LoadSnapshotResponse", "EnvironmentConfig", "WebServerNetworkAccessControl", "DatabaseConfig", "WebServerConfig", "EncryptionConfig", + "MaintenanceWindow", "SoftwareConfig", "IPAllocationPolicy", "NodeConfig", "PrivateClusterConfig", + "NetworkingConfig", "PrivateEnvironmentConfig", + "WorkloadsConfig", + "RecoveryConfig", + "ScheduledSnapshotsConfig", + "MasterAuthorizedNetworksConfig", "Environment", "CheckUpgradeResponse", }, @@ -276,20 +286,14 @@ class UpdateEnvironmentRequest(proto.Message): - Horizontally scale the number of nodes in the environment. An integer greater than or equal to 3 must be provided in the ``config.nodeCount`` field. + Supported for Cloud Composer environments in versions + composer-1.\ *.*-airflow-*.*.*. - ``config.webServerNetworkAccessControl`` - Replace the environment's current ``WebServerNetworkAccessControl``. 
- - ``config.databaseConfig`` - - - Replace the environment's current ``DatabaseConfig``. - - - ``config.webServerConfig`` - - - Replace the environment's current ``WebServerConfig``. - - ``config.softwareConfig.airflowConfigOverrides`` - Replace all Apache Airflow config overrides. If a @@ -314,9 +318,32 @@ class UpdateEnvironmentRequest(proto.Message): - Replace all environment variables. If a replacement environment variable map is not included in ``environment``, all custom environment variables are - cleared. It is an error to provide both this mask and - a mask specifying one or more individual environment - variables. + cleared. + + - ``config.softwareConfig.schedulerCount`` + + - Horizontally scale the number of schedulers in + Airflow. A positive integer not greater than the + number of nodes must be provided in the + ``config.softwareConfig.schedulerCount`` field. + Supported for Cloud Composer environments in versions + composer-1.\ *.*-airflow-2.*.*. + + - ``config.databaseConfig.machineType`` + + - Cloud SQL machine type used by Airflow database. It + has to be one of: db-n1-standard-2, db-n1-standard-4, + db-n1-standard-8 or db-n1-standard-16. Supported for + Cloud Composer environments in versions + composer-1.\ *.*-airflow-*.*.*. + + - ``config.webServerConfig.machineType`` + + - Machine type on which Airflow web server is running. + It has to be one of: composer-n1-webserver-2, + composer-n1-webserver-4 or composer-n1-webserver-8. + Supported for Cloud Composer environments in versions + composer-1.\ *.*-airflow-*.*.*. """ name: str = proto.Field( @@ -335,6 +362,103 @@ class UpdateEnvironmentRequest(proto.Message): ) +class SaveSnapshotRequest(proto.Message): + r"""Request to create a snapshot of a Cloud Composer environment. + + Attributes: + environment (str): + The resource name of the source environment + in the form: + "projects/{projectId}/locations/{locationId}/environments/{environmentId}". + snapshot_location (str): + Location in a Cloud Storage where the + snapshot is going to be stored, e.g.: + "gs://my-bucket/snapshots". + """ + + environment: str = proto.Field( + proto.STRING, + number=1, + ) + snapshot_location: str = proto.Field( + proto.STRING, + number=2, + ) + + +class SaveSnapshotResponse(proto.Message): + r"""Response to SaveSnapshotRequest. + + Attributes: + snapshot_path (str): + The fully-resolved Cloud Storage path of the created + snapshot, e.g.: + "gs://my-bucket/snapshots/project_location_environment_timestamp". + This field is populated only if the snapshot creation was + successful. + """ + + snapshot_path: str = proto.Field( + proto.STRING, + number=1, + ) + + +class LoadSnapshotRequest(proto.Message): + r"""Request to load a snapshot into a Cloud Composer environment. + + Attributes: + environment (str): + The resource name of the target environment + in the form: + "projects/{projectId}/locations/{locationId}/environments/{environmentId}". + snapshot_path (str): + A Cloud Storage path to a snapshot to load, e.g.: + "gs://my-bucket/snapshots/project_location_environment_timestamp". + skip_pypi_packages_installation (bool): + Whether or not to skip installing Pypi + packages when loading the environment's state. + skip_environment_variables_setting (bool): + Whether or not to skip setting environment + variables when loading the environment's state. + skip_airflow_overrides_setting (bool): + Whether or not to skip setting Airflow + overrides when loading the environment's state. 
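A sketch of a request that restores an environment's state while keeping its current PyPI packages and Airflow config overrides, assuming placeholder names (the remaining skip flag is described just below):

.. code-block:: python

    from google.cloud.orchestration.airflow import service_v1

    request = service_v1.LoadSnapshotRequest(
        environment="projects/my-project/locations/us-central1/environments/my-env",
        snapshot_path="gs://my-bucket/snapshots/my-snapshot",
        skip_pypi_packages_installation=True,  # keep currently installed PyPI packages
        skip_airflow_overrides_setting=True,   # keep current Airflow config overrides
        # environment variables and Cloud Storage data are restored from the snapshot
    )

    operation = service_v1.EnvironmentsClient().load_snapshot(request=request)
    operation.result()  # wait for the load to finish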
+ skip_gcs_data_copying (bool): + Whether or not to skip copying Cloud Storage + data when loading the environment's state. + """ + + environment: str = proto.Field( + proto.STRING, + number=1, + ) + snapshot_path: str = proto.Field( + proto.STRING, + number=2, + ) + skip_pypi_packages_installation: bool = proto.Field( + proto.BOOL, + number=3, + ) + skip_environment_variables_setting: bool = proto.Field( + proto.BOOL, + number=4, + ) + skip_airflow_overrides_setting: bool = proto.Field( + proto.BOOL, + number=5, + ) + skip_gcs_data_copying: bool = proto.Field( + proto.BOOL, + number=6, + ) + + +class LoadSnapshotResponse(proto.Message): + r"""Response to LoadSnapshotRequest.""" + + class EnvironmentConfig(proto.Message): r"""Configuration information for an environment. @@ -351,9 +475,11 @@ class EnvironmentConfig(proto.Message): for this environment reside in a simulated directory with the given prefix. node_count (int): - The number of nodes in the Kubernetes Engine - cluster that will be used to run this - environment. + The number of nodes in the Kubernetes Engine cluster that + will be used to run this environment. + + This field is supported for Cloud Composer environments in + versions composer-1.\ *.*-airflow-*.*.*. software_config (google.cloud.orchestration.airflow.service_v1.types.SoftwareConfig): The configuration settings for software inside the environment. @@ -379,12 +505,65 @@ class EnvironmentConfig(proto.Message): Optional. The encryption options for the Cloud Composer environment and its dependencies. Cannot be updated. + maintenance_window (google.cloud.orchestration.airflow.service_v1.types.MaintenanceWindow): + Optional. The maintenance window is the + period when Cloud Composer components may + undergo maintenance. It is defined so that + maintenance is not executed during peak hours or + critical time periods. + + The system will not be under maintenance for + every occurrence of this window, but when + maintenance is planned, it will be scheduled + during the window. + + The maintenance window period must encompass at + least 12 hours per week. This may be split into + multiple chunks, each with a size of at least 4 + hours. + + If this value is omitted, the default value for + maintenance window will be applied. The default + value is Saturday and Sunday 00-06 GMT. + workloads_config (google.cloud.orchestration.airflow.service_v1.types.WorkloadsConfig): + Optional. The workloads configuration settings for the GKE + cluster associated with the Cloud Composer environment. The + GKE cluster runs Airflow scheduler, web server and workers + workloads. + + This field is supported for Cloud Composer environments in + versions composer-2.\ *.*-airflow-*.*.\* and newer. + environment_size (google.cloud.orchestration.airflow.service_v1.types.EnvironmentConfig.EnvironmentSize): + Optional. The size of the Cloud Composer environment. + + This field is supported for Cloud Composer environments in + versions composer-2.\ *.*-airflow-*.*.\* and newer. airflow_uri (str): Output only. The URI of the Apache Airflow Web UI hosted within this environment (see `Airflow web interface `__). + master_authorized_networks_config (google.cloud.orchestration.airflow.service_v1.types.MasterAuthorizedNetworksConfig): + Optional. The configuration options for GKE + cluster master authorized networks. By default + master authorized networks feature is: - in case + of private environment: enabled with no external + networks allowlisted. + - in case of public environment: disabled. 
+ recovery_config (google.cloud.orchestration.airflow.service_v1.types.RecoveryConfig): + Optional. The Recovery settings configuration of an + environment. + + This field is supported for Cloud Composer environments in + versions composer-2.\ *.*-airflow-*.*.\* and newer. """ + class EnvironmentSize(proto.Enum): + r"""The size of the Cloud Composer environment.""" + ENVIRONMENT_SIZE_UNSPECIFIED = 0 + ENVIRONMENT_SIZE_SMALL = 1 + ENVIRONMENT_SIZE_MEDIUM = 2 + ENVIRONMENT_SIZE_LARGE = 3 + gke_cluster: str = proto.Field( proto.STRING, number=1, @@ -432,10 +611,35 @@ class EnvironmentConfig(proto.Message): number=11, message="EncryptionConfig", ) + maintenance_window: "MaintenanceWindow" = proto.Field( + proto.MESSAGE, + number=12, + message="MaintenanceWindow", + ) + workloads_config: "WorkloadsConfig" = proto.Field( + proto.MESSAGE, + number=15, + message="WorkloadsConfig", + ) + environment_size: EnvironmentSize = proto.Field( + proto.ENUM, + number=16, + enum=EnvironmentSize, + ) airflow_uri: str = proto.Field( proto.STRING, number=6, ) + master_authorized_networks_config: "MasterAuthorizedNetworksConfig" = proto.Field( + proto.MESSAGE, + number=17, + message="MasterAuthorizedNetworksConfig", + ) + recovery_config: "RecoveryConfig" = proto.Field( + proto.MESSAGE, + number=18, + message="RecoveryConfig", + ) class WebServerNetworkAccessControl(proto.Message): @@ -489,11 +693,11 @@ class DatabaseConfig(proto.Message): Attributes: machine_type (str): - Optional. Cloud SQL machine type used by - Airflow database. It has to be one of: - db-n1-standard-2, db-n1-standard-4, - db-n1-standard-8 or db-n1-standard-16. If not - specified, db-n1-standard-2 will be used. + Optional. Cloud SQL machine type used by Airflow database. + It has to be one of: db-n1-standard-2, db-n1-standard-4, + db-n1-standard-8 or db-n1-standard-16. If not specified, + db-n1-standard-2 will be used. Supported for Cloud Composer + environments in versions composer-1.\ *.*-airflow-*.*.*. """ machine_type: str = proto.Field( @@ -503,8 +707,9 @@ class DatabaseConfig(proto.Message): class WebServerConfig(proto.Message): - r"""The configuration settings for the Airflow web server App - Engine instance. + r"""The configuration settings for the Airflow web server App Engine + instance. Supported for Cloud Composer environments in versions + composer-1.\ *.*-airflow-*.*.\* Attributes: machine_type (str): @@ -525,8 +730,9 @@ class WebServerConfig(proto.Message): class EncryptionConfig(proto.Message): - r"""The encryption options for the Cloud Composer environment - and its dependencies. + r"""The encryption options for the Cloud Composer environment and its + dependencies.Supported for Cloud Composer environments in versions + composer-1.\ *.*-airflow-*.*.*. Attributes: kms_key_name (str): @@ -542,6 +748,54 @@ class EncryptionConfig(proto.Message): ) +class MaintenanceWindow(proto.Message): + r"""The configuration settings for Cloud Composer maintenance window. + The following example: + + :: + + { + "startTime":"2019-08-01T01:00:00Z" + "endTime":"2019-08-01T07:00:00Z" + "recurrence":"FREQ=WEEKLY;BYDAY=TU,WE" + } + + would define a maintenance window between 01 and 07 hours UTC during + each Tuesday and Wednesday. + + Attributes: + start_time (google.protobuf.timestamp_pb2.Timestamp): + Required. Start time of the first recurrence + of the maintenance window. + end_time (google.protobuf.timestamp_pb2.Timestamp): + Required. Maintenance window end time. It is used only to + calculate the duration of the maintenance window. 
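To make the window format concrete, a sketch that mirrors the JSON example above using the generated types (timestamps and recurrence are placeholders):

.. code-block:: python

    from google.cloud.orchestration.airflow import service_v1
    from google.protobuf import timestamp_pb2

    start = timestamp_pb2.Timestamp()
    start.FromJsonString("2019-08-01T01:00:00Z")
    end = timestamp_pb2.Timestamp()
    end.FromJsonString("2019-08-01T07:00:00Z")

    # A six-hour window recurring every Tuesday and Wednesday, as in the example above.
    maintenance_window = service_v1.MaintenanceWindow(
        start_time=start,
        end_time=end,
        recurrence="FREQ=WEEKLY;BYDAY=TU,WE",
    )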
The value + for end-time must be in the future, relative to + ``start_time``. + recurrence (str): + Required. Maintenance window recurrence. Format is a subset + of `RFC-5545 `__ + ``RRULE``. The only allowed values for ``FREQ`` field are + ``FREQ=DAILY`` and ``FREQ=WEEKLY;BYDAY=...`` Example values: + ``FREQ=WEEKLY;BYDAY=TU,WE``, ``FREQ=DAILY``. + """ + + start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + recurrence: str = proto.Field( + proto.STRING, + number=3, + ) + + class SoftwareConfig(proto.Message): r"""Specifies the selection and configuration of software inside the environment. @@ -552,25 +806,32 @@ class SoftwareConfig(proto.Message): encapsulates both the version of Cloud Composer functionality and the version of Apache Airflow. It must match the regular expression - ``composer-([0-9]+\.[0-9]+\.[0-9]+|latest)-airflow-[0-9]+\.[0-9]+(\.[0-9]+.*)?``. + ``composer-([0-9]+(\.[0-9]+\.[0-9]+(-preview\.[0-9]+)?)?|latest)-airflow-([0-9]+(\.[0-9]+(\.[0-9]+)?)?)``. When used as input, the server also checks if the provided version is supported and denies the request for an unsupported version. - The Cloud Composer portion of the version is a `semantic - version `__ or ``latest``. When the - patch version is omitted, the current Cloud Composer patch - version is selected. When ``latest`` is provided instead of - an explicit version number, the server replaces ``latest`` - with the current Cloud Composer version and stores that - version number in the same field. - - The portion of the image version that follows *airflow-* is - an official Apache Airflow repository `release - name `__. - - See also `Version - List `__. + The Cloud Composer portion of the image version is a full + `semantic version `__, or an alias in + the form of major version number or ``latest``. When an + alias is provided, the server replaces it with the current + Cloud Composer version that satisfies the alias. + + The Apache Airflow portion of the image version is a full + semantic version that points to one of the supported Apache + Airflow versions, or an alias in the form of only major or + major.minor versions specified. When an alias is provided, + the server replaces it with the latest Apache Airflow + version that satisfies the alias and is supported in the + given Cloud Composer version. + + In all cases, the resolved image version is stored in the + same field. + + See also `version + list `__ + and `versioning + overview `__. airflow_config_overrides (MutableMapping[str, str]): Optional. Apache Airflow configuration properties to override. @@ -624,11 +885,20 @@ class SoftwareConfig(proto.Message): - ``SQL_REGION`` - ``SQL_USER`` python_version (str): - Optional. The major version of Python used to - run the Apache Airflow scheduler, worker, and - webserver processes. - Can be set to '2' or '3'. If not specified, the - default is '3'. Cannot be updated. + Optional. The major version of Python used to run the Apache + Airflow scheduler, worker, and webserver processes. + + Can be set to '2' or '3'. If not specified, the default is + '3'. Cannot be updated. + + This field is only supported for Cloud Composer environments + in versions composer-1.\ *.*-airflow-*.*.*. Environments in + newer versions always use Python major version 3. + scheduler_count (int): + Optional. The number of schedulers for Airflow. 
+ + This field is supported for Cloud Composer environments in + versions composer-1.\ *.*-airflow-2.*.*. """ image_version: str = proto.Field( @@ -654,6 +924,10 @@ class SoftwareConfig(proto.Message): proto.STRING, number=6, ) + scheduler_count: int = proto.Field( + proto.INT32, + number=7, + ) class IPAllocationPolicy(proto.Message): @@ -671,20 +945,26 @@ class IPAllocationPolicy(proto.Message): use_ip_aliases (bool): Optional. Whether or not to enable Alias IPs in the GKE cluster. If ``true``, a VPC-native cluster is created. + + This field is only supported for Cloud Composer environments + in versions composer-1.\ *.*-airflow-*.*.*. Environments in + newer versions always use VPC-native GKE clusters. cluster_secondary_range_name (str): Optional. The name of the GKE cluster's secondary range used to allocate IP addresses to pods. - This field is applicable only when ``use_ip_aliases`` is - true. + For Cloud Composer environments in versions + composer-1.\ *.*-airflow-*.*.*, this field is applicable + only when ``use_ip_aliases`` is true. This field is a member of `oneof`_ ``cluster_ip_allocation``. cluster_ipv4_cidr_block (str): Optional. The IP address range used to allocate IP addresses to pods in the GKE cluster. - This field is applicable only when ``use_ip_aliases`` is - true. + For Cloud Composer environments in versions + composer-1.\ *.*-airflow-*.*.*, this field is applicable + only when ``use_ip_aliases`` is true. Set to blank to have GKE choose a range with the default size. @@ -693,7 +973,7 @@ class IPAllocationPolicy(proto.Message): with a specific netmask. Set to a - `CIDR `__ + `CIDR `__ notation (e.g. ``10.96.0.0/14``) from the RFC-1918 private networks (e.g. ``10.0.0.0/8``, ``172.16.0.0/12``, ``192.168.0.0/16``) to pick a specific range to use. @@ -703,16 +983,18 @@ class IPAllocationPolicy(proto.Message): Optional. The name of the services' secondary range used to allocate IP addresses to the GKE cluster. - This field is applicable only when ``use_ip_aliases`` is - true. + For Cloud Composer environments in versions + composer-1.\ *.*-airflow-*.*.*, this field is applicable + only when ``use_ip_aliases`` is true. This field is a member of `oneof`_ ``services_ip_allocation``. services_ipv4_cidr_block (str): Optional. The IP address range of the services IP addresses in this GKE cluster. - This field is applicable only when ``use_ip_aliases`` is - true. + For Cloud Composer environments in versions + composer-1.\ *.*-airflow-*.*.*, this field is applicable + only when ``use_ip_aliases`` is true. Set to blank to have GKE choose a range with the default size. @@ -721,7 +1003,7 @@ class IPAllocationPolicy(proto.Message): with a specific netmask. Set to a - `CIDR `__ + `CIDR `__ notation (e.g. ``10.96.0.0/14``) from the RFC-1918 private networks (e.g. ``10.0.0.0/8``, ``172.16.0.0/12``, ``192.168.0.0/16``) to pick a specific range to use. @@ -778,6 +1060,9 @@ class NodeConfig(proto.Message): one field (``location`` or ``nodeConfig.machineType``) is specified, the location information from the specified field will be propagated to the unspecified field. + + This field is supported for Cloud Composer environments in + versions composer-1.\ *.*-airflow-*.*.*. machine_type (str): Optional. The Compute Engine `machine type `__ used for cluster @@ -802,6 +1087,9 @@ class NodeConfig(proto.Message): If this field is unspecified, the ``machineTypeId`` defaults to "n1-standard-1". + + This field is supported for Cloud Composer environments in + versions composer-1.\ *.*-airflow-*.*.*. 
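Putting the SoftwareConfig fields described above together, a sketch using version aliases that the service resolves as documented (override keys and values are placeholders):

.. code-block:: python

    from google.cloud.orchestration.airflow import service_v1

    software_config = service_v1.SoftwareConfig(
        image_version="composer-1-airflow-2",  # major-version aliases, resolved by the service
        airflow_config_overrides={"core-dags_are_paused_at_creation": "True"},
        env_variables={"MY_SETTING": "value"},
        scheduler_count=2,  # supported for composer-1.*.*-airflow-2.*.* environments
    )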
network (str): Optional. The Compute Engine network to be used for machine communications, specified as a `relative resource @@ -826,14 +1114,20 @@ class NodeConfig(proto.Message): also be provided, and the subnetwork must belong to the enclosing environment's project and location. disk_size_gb (int): - Optional. The disk size in GB used for node - VMs. Minimum size is 20GB. If unspecified, - defaults to 100GB. Cannot be updated. + Optional. The disk size in GB used for node VMs. Minimum + size is 30GB. If unspecified, defaults to 100GB. Cannot be + updated. + + This field is supported for Cloud Composer environments in + versions composer-1.\ *.*-airflow-*.*.*. oauth_scopes (MutableSequence[str]): Optional. The set of Google API scopes to be made available on all node VMs. If ``oauth_scopes`` is empty, defaults to ["https://www.googleapis.com/auth/cloud-platform"]. Cannot be updated. + + This field is supported for Cloud Composer environments in + versions composer-1.\ *.*-airflow-*.*.*. service_account (str): Optional. The Google Cloud Platform Service Account to be used by the node VMs. If a service @@ -846,9 +1140,20 @@ class NodeConfig(proto.Message): network firewalls. Each tag within the list must comply with `RFC1035 `__. Cannot be updated. + + This field is supported for Cloud Composer environments in + versions composer-1.\ *.*-airflow-*.*.*. ip_allocation_policy (google.cloud.orchestration.airflow.service_v1.types.IPAllocationPolicy): Optional. The configuration for controlling how IPs are allocated in the GKE cluster. + enable_ip_masq_agent (bool): + Optional. Deploys 'ip-masq-agent' daemon set + in the GKE cluster and defines + nonMasqueradeCIDRs equals to pod IP range so IP + masquerading is used for all destination + addresses, except between pods traffic. + See: + https://cloud.google.com/kubernetes-engine/docs/how-to/ip-masquerade-agent """ location: str = proto.Field( @@ -888,6 +1193,10 @@ class NodeConfig(proto.Message): number=9, message="IPAllocationPolicy", ) + enable_ip_masq_agent: bool = proto.Field( + proto.BOOL, + number=11, + ) class PrivateClusterConfig(proto.Message): @@ -927,6 +1236,34 @@ class PrivateClusterConfig(proto.Message): ) +class NetworkingConfig(proto.Message): + r"""Configuration options for networking connections in the + Composer 2 environment. + + Attributes: + connection_type (google.cloud.orchestration.airflow.service_v1.types.NetworkingConfig.ConnectionType): + Optional. Indicates the user requested + specifc connection type between Tenant and + Customer projects. You cannot set networking + connection type in public IP environment. + """ + + class ConnectionType(proto.Enum): + r"""Represents connection type between Composer environment in + Customer Project and the corresponding Tenant project, from a + predefined list of available connection modes. + """ + CONNECTION_TYPE_UNSPECIFIED = 0 + VPC_PEERING = 1 + PRIVATE_SERVICE_CONNECT = 2 + + connection_type: ConnectionType = proto.Field( + proto.ENUM, + number=1, + enum=ConnectionType, + ) + + class PrivateEnvironmentConfig(proto.Message): r"""The configuration information for configuring a Private IP Cloud Composer environment. @@ -935,7 +1272,9 @@ class PrivateEnvironmentConfig(proto.Message): enable_private_environment (bool): Optional. If ``true``, a Private IP Cloud Composer environment is created. If this field is set to true, - ``IPAllocationPolicy.use_ip_aliases`` must be set to true. 
+ ``IPAllocationPolicy.use_ip_aliases`` must be set to true + for Cloud Composer environments in versions + composer-1.\ *.*-airflow-*.*.*. private_cluster_config (google.cloud.orchestration.airflow.service_v1.types.PrivateClusterConfig): Optional. Configuration for the private GKE cluster for a Private IP Cloud Composer @@ -945,13 +1284,49 @@ class PrivateEnvironmentConfig(proto.Message): will be reserved. Needs to be disjoint from ``private_cluster_config.master_ipv4_cidr_block`` and ``cloud_sql_ipv4_cidr_block``. + + This field is supported for Cloud Composer environments in + versions composer-1.\ *.*-airflow-*.*.*. cloud_sql_ipv4_cidr_block (str): Optional. The CIDR block from which IP range in tenant project will be reserved for Cloud SQL. Needs to be disjoint from ``web_server_ipv4_cidr_block``. web_server_ipv4_reserved_range (str): - Output only. The IP range reserved for the - tenant project's App Engine VMs. + Output only. The IP range reserved for the tenant project's + App Engine VMs. + + This field is supported for Cloud Composer environments in + versions composer-1.\ *.*-airflow-*.*.*. + cloud_composer_network_ipv4_cidr_block (str): + Optional. The CIDR block from which IP range for Cloud + Composer Network in tenant project will be reserved. Needs + to be disjoint from + private_cluster_config.master_ipv4_cidr_block and + cloud_sql_ipv4_cidr_block. + + This field is supported for Cloud Composer environments in + versions composer-2.\ *.*-airflow-*.*.\* and newer. + cloud_composer_network_ipv4_reserved_range (str): + Output only. The IP range reserved for the tenant project's + Cloud Composer network. + + This field is supported for Cloud Composer environments in + versions composer-2.\ *.*-airflow-*.*.\* and newer. + enable_privately_used_public_ips (bool): + Optional. When enabled, IPs from public (non-RFC1918) ranges + can be used for + ``IPAllocationPolicy.cluster_ipv4_cidr_block`` and + ``IPAllocationPolicy.service_ipv4_cidr_block``. + cloud_composer_connection_subnetwork (str): + Optional. When specified, the environment + will use Private Service Connect instead of VPC + peerings to connect to Cloud SQL in the Tenant + Project, and the PSC endpoint in the Customer + Project will use an IP address from this + subnetwork. + networking_config (google.cloud.orchestration.airflow.service_v1.types.NetworkingConfig): + Optional. Configuration for the network + connections configuration in the environment. """ enable_private_environment: bool = proto.Field( @@ -975,6 +1350,266 @@ class PrivateEnvironmentConfig(proto.Message): proto.STRING, number=5, ) + cloud_composer_network_ipv4_cidr_block: str = proto.Field( + proto.STRING, + number=7, + ) + cloud_composer_network_ipv4_reserved_range: str = proto.Field( + proto.STRING, + number=8, + ) + enable_privately_used_public_ips: bool = proto.Field( + proto.BOOL, + number=6, + ) + cloud_composer_connection_subnetwork: str = proto.Field( + proto.STRING, + number=9, + ) + networking_config: "NetworkingConfig" = proto.Field( + proto.MESSAGE, + number=10, + message="NetworkingConfig", + ) + + +class WorkloadsConfig(proto.Message): + r"""The Kubernetes workloads configuration for GKE cluster associated + with the Cloud Composer environment. Supported for Cloud Composer + environments in versions composer-2.\ *.*-airflow-*.*.\* and newer. + + Attributes: + scheduler (google.cloud.orchestration.airflow.service_v1.types.WorkloadsConfig.SchedulerResource): + Optional. Resources used by Airflow + schedulers. 
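A sketch of the new Private Service Connect option on PrivateEnvironmentConfig, using the NetworkingConfig message above (the subnetwork path is a placeholder; the connection type cannot be set for public IP environments):

.. code-block:: python

    from google.cloud.orchestration.airflow import service_v1

    private_config = service_v1.PrivateEnvironmentConfig(
        enable_private_environment=True,
        enable_privately_used_public_ips=True,
        cloud_composer_connection_subnetwork=(
            "projects/my-project/regions/us-central1/subnetworks/my-subnetwork"
        ),
        networking_config=service_v1.NetworkingConfig(
            connection_type=service_v1.NetworkingConfig.ConnectionType.PRIVATE_SERVICE_CONNECT
        ),
    )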
+ web_server (google.cloud.orchestration.airflow.service_v1.types.WorkloadsConfig.WebServerResource): + Optional. Resources used by Airflow web + server. + worker (google.cloud.orchestration.airflow.service_v1.types.WorkloadsConfig.WorkerResource): + Optional. Resources used by Airflow workers. + """ + + class SchedulerResource(proto.Message): + r"""Configuration for resources used by Airflow schedulers. + + Attributes: + cpu (float): + Optional. CPU request and limit for a single + Airflow scheduler replica. + memory_gb (float): + Optional. Memory (GB) request and limit for a + single Airflow scheduler replica. + storage_gb (float): + Optional. Storage (GB) request and limit for + a single Airflow scheduler replica. + count (int): + Optional. The number of schedulers. + """ + + cpu: float = proto.Field( + proto.FLOAT, + number=1, + ) + memory_gb: float = proto.Field( + proto.FLOAT, + number=2, + ) + storage_gb: float = proto.Field( + proto.FLOAT, + number=3, + ) + count: int = proto.Field( + proto.INT32, + number=4, + ) + + class WebServerResource(proto.Message): + r"""Configuration for resources used by Airflow web server. + + Attributes: + cpu (float): + Optional. CPU request and limit for Airflow + web server. + memory_gb (float): + Optional. Memory (GB) request and limit for + Airflow web server. + storage_gb (float): + Optional. Storage (GB) request and limit for + Airflow web server. + """ + + cpu: float = proto.Field( + proto.FLOAT, + number=1, + ) + memory_gb: float = proto.Field( + proto.FLOAT, + number=2, + ) + storage_gb: float = proto.Field( + proto.FLOAT, + number=3, + ) + + class WorkerResource(proto.Message): + r"""Configuration for resources used by Airflow workers. + + Attributes: + cpu (float): + Optional. CPU request and limit for a single + Airflow worker replica. + memory_gb (float): + Optional. Memory (GB) request and limit for a + single Airflow worker replica. + storage_gb (float): + Optional. Storage (GB) request and limit for + a single Airflow worker replica. + min_count (int): + Optional. Minimum number of workers for + autoscaling. + max_count (int): + Optional. Maximum number of workers for + autoscaling. + """ + + cpu: float = proto.Field( + proto.FLOAT, + number=1, + ) + memory_gb: float = proto.Field( + proto.FLOAT, + number=2, + ) + storage_gb: float = proto.Field( + proto.FLOAT, + number=3, + ) + min_count: int = proto.Field( + proto.INT32, + number=4, + ) + max_count: int = proto.Field( + proto.INT32, + number=5, + ) + + scheduler: SchedulerResource = proto.Field( + proto.MESSAGE, + number=1, + message=SchedulerResource, + ) + web_server: WebServerResource = proto.Field( + proto.MESSAGE, + number=2, + message=WebServerResource, + ) + worker: WorkerResource = proto.Field( + proto.MESSAGE, + number=3, + message=WorkerResource, + ) + + +class RecoveryConfig(proto.Message): + r"""The Recovery settings of an environment. + + Attributes: + scheduled_snapshots_config (google.cloud.orchestration.airflow.service_v1.types.ScheduledSnapshotsConfig): + Optional. The configuration for scheduled + snapshot creation mechanism. + """ + + scheduled_snapshots_config: "ScheduledSnapshotsConfig" = proto.Field( + proto.MESSAGE, + number=1, + message="ScheduledSnapshotsConfig", + ) + + +class ScheduledSnapshotsConfig(proto.Message): + r"""The configuration for scheduled snapshot creation mechanism. + + Attributes: + enabled (bool): + Optional. Whether scheduled snapshots + creation is enabled. + snapshot_location (str): + Optional. 
The Cloud Storage location for + storing automatically created snapshots. + snapshot_creation_schedule (str): + Optional. The cron expression representing + the time when snapshots creation mechanism runs. + This field is subject to additional validation + around frequency of execution. + time_zone (str): + Optional. Time zone that sets the context to interpret + snapshot_creation_schedule. + """ + + enabled: bool = proto.Field( + proto.BOOL, + number=1, + ) + snapshot_location: str = proto.Field( + proto.STRING, + number=6, + ) + snapshot_creation_schedule: str = proto.Field( + proto.STRING, + number=3, + ) + time_zone: str = proto.Field( + proto.STRING, + number=5, + ) + + +class MasterAuthorizedNetworksConfig(proto.Message): + r"""Configuration options for the master authorized networks + feature. Enabled master authorized networks will disallow all + external traffic to access Kubernetes master through HTTPS + except traffic from the given CIDR blocks, Google Compute Engine + Public IPs and Google Prod IPs. + + Attributes: + enabled (bool): + Whether or not master authorized networks + feature is enabled. + cidr_blocks (MutableSequence[google.cloud.orchestration.airflow.service_v1.types.MasterAuthorizedNetworksConfig.CidrBlock]): + Up to 50 external networks that could access + Kubernetes master through HTTPS. + """ + + class CidrBlock(proto.Message): + r"""CIDR block with an optional name. + + Attributes: + display_name (str): + User-defined name that identifies the CIDR + block. + cidr_block (str): + CIDR block that must be specified in CIDR + notation. + """ + + display_name: str = proto.Field( + proto.STRING, + number=1, + ) + cidr_block: str = proto.Field( + proto.STRING, + number=2, + ) + + enabled: bool = proto.Field( + proto.BOOL, + number=1, + ) + cidr_blocks: MutableSequence[CidrBlock] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=CidrBlock, + ) class Environment(proto.Message): diff --git a/google/cloud/orchestration/airflow/service_v1/types/image_versions.py b/google/cloud/orchestration/airflow/service_v1/types/image_versions.py index d338b01..323efa1 100644 --- a/google/cloud/orchestration/airflow/service_v1/types/image_versions.py +++ b/google/cloud/orchestration/airflow/service_v1/types/image_versions.py @@ -97,7 +97,7 @@ class ImageVersion(proto.Message): Attributes: image_version_id (str): The string identifier of the ImageVersion, in - the form: "composer-x.y.z-airflow-a.b(.c)". + the form: "composer-x.y.z-airflow-a.b.c". 
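Tying the recovery and network-restriction messages defined above together, a sketch with placeholder bucket, schedule and CIDR values:

.. code-block:: python

    from google.cloud.orchestration.airflow import service_v1

    # Daily snapshots at 03:00, stored in a placeholder bucket, interpreted in UTC.
    recovery_config = service_v1.RecoveryConfig(
        scheduled_snapshots_config=service_v1.ScheduledSnapshotsConfig(
            enabled=True,
            snapshot_location="gs://my-bucket/snapshots",
            snapshot_creation_schedule="0 3 * * *",
            time_zone="UTC",
        ),
    )

    # Restrict Kubernetes master access to a single placeholder CIDR block.
    master_authorized = service_v1.MasterAuthorizedNetworksConfig(
        enabled=True,
        cidr_blocks=[
            service_v1.MasterAuthorizedNetworksConfig.CidrBlock(
                display_name="office", cidr_block="203.0.113.0/24"
            )
        ],
    )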
is_default (bool): Whether this is the default ImageVersion used by Composer during environment creation if no diff --git a/google/cloud/orchestration/airflow/service_v1/types/operations.py b/google/cloud/orchestration/airflow/service_v1/types/operations.py index dbafb35..36db2ce 100644 --- a/google/cloud/orchestration/airflow/service_v1/types/operations.py +++ b/google/cloud/orchestration/airflow/service_v1/types/operations.py @@ -69,6 +69,8 @@ class Type(proto.Enum): DELETE = 2 UPDATE = 3 CHECK = 4 + SAVE_SNAPSHOT = 5 + LOAD_SNAPSHOT = 6 state: State = proto.Field( proto.ENUM, diff --git a/google/cloud/orchestration/airflow/service_v1beta1/__init__.py b/google/cloud/orchestration/airflow/service_v1beta1/__init__.py index c4fdc7f..cef0c25 100644 --- a/google/cloud/orchestration/airflow/service_v1beta1/__init__.py +++ b/google/cloud/orchestration/airflow/service_v1beta1/__init__.py @@ -23,21 +23,32 @@ from .types.environments import ( CheckUpgradeRequest, CheckUpgradeResponse, + CloudDataLineageIntegration, CreateEnvironmentRequest, DatabaseConfig, DeleteEnvironmentRequest, EncryptionConfig, Environment, EnvironmentConfig, + ExecuteAirflowCommandResponse, GetEnvironmentRequest, IPAllocationPolicy, ListEnvironmentsRequest, ListEnvironmentsResponse, + LoadSnapshotRequest, + LoadSnapshotResponse, MaintenanceWindow, + MasterAuthorizedNetworksConfig, + NetworkingConfig, NodeConfig, + PollAirflowCommandResponse, PrivateClusterConfig, PrivateEnvironmentConfig, + RecoveryConfig, RestartWebServerRequest, + SaveSnapshotRequest, + SaveSnapshotResponse, + ScheduledSnapshotsConfig, SoftwareConfig, UpdateEnvironmentRequest, WebServerConfig, @@ -56,6 +67,7 @@ "ImageVersionsAsyncClient", "CheckUpgradeRequest", "CheckUpgradeResponse", + "CloudDataLineageIntegration", "CreateEnvironmentRequest", "DatabaseConfig", "DeleteEnvironmentRequest", @@ -63,6 +75,7 @@ "Environment", "EnvironmentConfig", "EnvironmentsClient", + "ExecuteAirflowCommandResponse", "GetEnvironmentRequest", "IPAllocationPolicy", "ImageVersion", @@ -71,12 +84,21 @@ "ListEnvironmentsResponse", "ListImageVersionsRequest", "ListImageVersionsResponse", + "LoadSnapshotRequest", + "LoadSnapshotResponse", "MaintenanceWindow", + "MasterAuthorizedNetworksConfig", + "NetworkingConfig", "NodeConfig", "OperationMetadata", + "PollAirflowCommandResponse", "PrivateClusterConfig", "PrivateEnvironmentConfig", + "RecoveryConfig", "RestartWebServerRequest", + "SaveSnapshotRequest", + "SaveSnapshotResponse", + "ScheduledSnapshotsConfig", "SoftwareConfig", "UpdateEnvironmentRequest", "WebServerConfig", diff --git a/google/cloud/orchestration/airflow/service_v1beta1/gapic_metadata.json b/google/cloud/orchestration/airflow/service_v1beta1/gapic_metadata.json index 3899a07..69f39fb 100644 --- a/google/cloud/orchestration/airflow/service_v1beta1/gapic_metadata.json +++ b/google/cloud/orchestration/airflow/service_v1beta1/gapic_metadata.json @@ -35,11 +35,21 @@ "list_environments" ] }, + "LoadSnapshot": { + "methods": [ + "load_snapshot" + ] + }, "RestartWebServer": { "methods": [ "restart_web_server" ] }, + "SaveSnapshot": { + "methods": [ + "save_snapshot" + ] + }, "UpdateEnvironment": { "methods": [ "update_environment" @@ -75,11 +85,21 @@ "list_environments" ] }, + "LoadSnapshot": { + "methods": [ + "load_snapshot" + ] + }, "RestartWebServer": { "methods": [ "restart_web_server" ] }, + "SaveSnapshot": { + "methods": [ + "save_snapshot" + ] + }, "UpdateEnvironment": { "methods": [ "update_environment" diff --git 
a/google/cloud/orchestration/airflow/service_v1beta1/services/environments/async_client.py b/google/cloud/orchestration/airflow/service_v1beta1/services/environments/async_client.py index 05b0464..66540ce 100644 --- a/google/cloud/orchestration/airflow/service_v1beta1/services/environments/async_client.py +++ b/google/cloud/orchestration/airflow/service_v1beta1/services/environments/async_client.py @@ -736,7 +736,11 @@ async def sample_update_environment(): - Horizontally scale the number of nodes in the environment. An integer greater than or equal to 3 must be provided in the ``config.nodeCount`` - field. \* ``config.webServerNetworkAccessControl`` + field. Supported for Cloud Composer environments + in versions composer-1.\ *.*-airflow-*.*.*. + + - ``config.webServerNetworkAccessControl`` + - Replace the environment's current WebServerNetworkAccessControl. @@ -767,9 +771,7 @@ async def sample_update_environment(): - Replace all environment variables. If a replacement environment variable map is not included in ``environment``, all custom - environment variables are cleared. It is an error - to provide both this mask and a mask specifying - one or more individual environment variables. + environment variables are cleared. - ``config.softwareConfig.imageVersion`` @@ -777,11 +779,11 @@ async def sample_update_environment(): Refer to ``SoftwareConfig.image_version`` for information on how to format the new image version. Additionally, the new image version - cannot effect a version downgrade and must match - the current image version's Composer major version - and Airflow major and minor versions. Consult the - `Cloud Composer Version - List `__ + cannot effect a version downgrade, and must match + the current image version's Composer and Airflow + major versions. Consult the `Cloud Composer + version + list `__ for valid values. - ``config.softwareConfig.schedulerCount`` @@ -789,21 +791,52 @@ async def sample_update_environment(): - Horizontally scale the number of schedulers in Airflow. A positive integer not greater than the number of nodes must be provided in the - ``config.softwareConfig.schedulerCount`` field. \* - ``config.databaseConfig.machineType`` + ``config.softwareConfig.schedulerCount`` field. + Supported for Cloud Composer environments in + versions composer-1.\ *.*-airflow-2.*.*. + + - ``config.softwareConfig.cloudDataLineageIntegration`` + + - Configuration for Cloud Data Lineage integration. + + - ``config.databaseConfig.machineType`` + - Cloud SQL machine type used by Airflow database. It has to be one of: db-n1-standard-2, db-n1-standard-4, db-n1-standard-8 or - db-n1-standard-16. \* - ``config.webServerConfig.machineType`` + db-n1-standard-16. Supported for Cloud Composer + environments in versions + composer-1.\ *.*-airflow-*.*.*. + + - ``config.webServerConfig.machineType`` + - Machine type on which Airflow web server is running. It has to be one of: composer-n1-webserver-2, composer-n1-webserver-4 - or composer-n1-webserver-8. \* - ``config.maintenanceWindow`` + or composer-n1-webserver-8. Supported for Cloud + Composer environments in versions + composer-1.\ *.*-airflow-*.*.*. + + - ``config.maintenanceWindow`` + - Maintenance window during which Cloud Composer components may be under maintenance. + - ``config.workloadsConfig`` + + - The workloads configuration settings for the GKE + cluster associated with the Cloud Composer + environment. Supported for Cloud Composer + environments in versions + composer-2.\ *.*-airflow-*.*.\* and newer. 
+ + - ``config.environmentSize`` + + - The size of the Cloud Composer environment. + Supported for Cloud Composer environments in + versions composer-2.\ *.*-airflow-*.*.\* and + newer. + This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -1190,6 +1223,204 @@ async def sample_check_upgrade(): # Done; return the response. return response + async def save_snapshot( + self, + request: Optional[Union[environments.SaveSnapshotRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a snapshots of a Cloud Composer environment. + As a result of this operation, snapshot of environment's + state is stored in a location specified in the + SaveSnapshotRequest. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.orchestration.airflow import service_v1beta1 + + async def sample_save_snapshot(): + # Create a client + client = service_v1beta1.EnvironmentsAsyncClient() + + # Initialize request argument(s) + request = service_v1beta1.SaveSnapshotRequest( + ) + + # Make the request + operation = client.save_snapshot(request=request) + + print("Waiting for operation to complete...") + + response = await operation.result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.orchestration.airflow.service_v1beta1.types.SaveSnapshotRequest, dict]]): + The request object. Request to create a snapshot of a + Cloud Composer environment. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.orchestration.airflow.service_v1beta1.types.SaveSnapshotResponse` + Response to SaveSnapshotRequest. + + """ + # Create or coerce a protobuf request object. + request = environments.SaveSnapshotRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.save_snapshot, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("environment", request.environment),) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + environments.SaveSnapshotResponse, + metadata_type=operations.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + async def load_snapshot( + self, + request: Optional[Union[environments.LoadSnapshotRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Loads a snapshot of a Cloud Composer environment. + As a result of this operation, a snapshot of + environment's specified in LoadSnapshotRequest is loaded + into the environment. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.orchestration.airflow import service_v1beta1 + + async def sample_load_snapshot(): + # Create a client + client = service_v1beta1.EnvironmentsAsyncClient() + + # Initialize request argument(s) + request = service_v1beta1.LoadSnapshotRequest( + ) + + # Make the request + operation = client.load_snapshot(request=request) + + print("Waiting for operation to complete...") + + response = await operation.result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.orchestration.airflow.service_v1beta1.types.LoadSnapshotRequest, dict]]): + The request object. Request to load a snapshot into a + Cloud Composer environment. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.orchestration.airflow.service_v1beta1.types.LoadSnapshotResponse` + Response to LoadSnapshotRequest. + + """ + # Create or coerce a protobuf request object. + request = environments.LoadSnapshotRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.load_snapshot, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("environment", request.environment),) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + environments.LoadSnapshotResponse, + metadata_type=operations.OperationMetadata, + ) + + # Done; return the response. 
+ return response + async def __aenter__(self): return self diff --git a/google/cloud/orchestration/airflow/service_v1beta1/services/environments/client.py b/google/cloud/orchestration/airflow/service_v1beta1/services/environments/client.py index fad5038..149d3ac 100644 --- a/google/cloud/orchestration/airflow/service_v1beta1/services/environments/client.py +++ b/google/cloud/orchestration/airflow/service_v1beta1/services/environments/client.py @@ -967,7 +967,11 @@ def sample_update_environment(): - Horizontally scale the number of nodes in the environment. An integer greater than or equal to 3 must be provided in the ``config.nodeCount`` - field. \* ``config.webServerNetworkAccessControl`` + field. Supported for Cloud Composer environments + in versions composer-1.\ *.*-airflow-*.*.*. + + - ``config.webServerNetworkAccessControl`` + - Replace the environment's current WebServerNetworkAccessControl. @@ -998,9 +1002,7 @@ def sample_update_environment(): - Replace all environment variables. If a replacement environment variable map is not included in ``environment``, all custom - environment variables are cleared. It is an error - to provide both this mask and a mask specifying - one or more individual environment variables. + environment variables are cleared. - ``config.softwareConfig.imageVersion`` @@ -1008,11 +1010,11 @@ def sample_update_environment(): Refer to ``SoftwareConfig.image_version`` for information on how to format the new image version. Additionally, the new image version - cannot effect a version downgrade and must match - the current image version's Composer major version - and Airflow major and minor versions. Consult the - `Cloud Composer Version - List `__ + cannot effect a version downgrade, and must match + the current image version's Composer and Airflow + major versions. Consult the `Cloud Composer + version + list `__ for valid values. - ``config.softwareConfig.schedulerCount`` @@ -1020,21 +1022,52 @@ def sample_update_environment(): - Horizontally scale the number of schedulers in Airflow. A positive integer not greater than the number of nodes must be provided in the - ``config.softwareConfig.schedulerCount`` field. \* - ``config.databaseConfig.machineType`` + ``config.softwareConfig.schedulerCount`` field. + Supported for Cloud Composer environments in + versions composer-1.\ *.*-airflow-2.*.*. + + - ``config.softwareConfig.cloudDataLineageIntegration`` + + - Configuration for Cloud Data Lineage integration. + + - ``config.databaseConfig.machineType`` + - Cloud SQL machine type used by Airflow database. It has to be one of: db-n1-standard-2, db-n1-standard-4, db-n1-standard-8 or - db-n1-standard-16. \* - ``config.webServerConfig.machineType`` + db-n1-standard-16. Supported for Cloud Composer + environments in versions + composer-1.\ *.*-airflow-*.*.*. + + - ``config.webServerConfig.machineType`` + - Machine type on which Airflow web server is running. It has to be one of: composer-n1-webserver-2, composer-n1-webserver-4 - or composer-n1-webserver-8. \* - ``config.maintenanceWindow`` + or composer-n1-webserver-8. Supported for Cloud + Composer environments in versions + composer-1.\ *.*-airflow-*.*.*. + + - ``config.maintenanceWindow`` + - Maintenance window during which Cloud Composer components may be under maintenance. + - ``config.workloadsConfig`` + + - The workloads configuration settings for the GKE + cluster associated with the Cloud Composer + environment. 
Supported for Cloud Composer + environments in versions + composer-2.\ *.*-airflow-*.*.\* and newer. + + - ``config.environmentSize`` + + - The size of the Cloud Composer environment. + Supported for Cloud Composer environments in + versions composer-2.\ *.*-airflow-*.*.\* and + newer. + This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -1423,6 +1456,206 @@ def sample_check_upgrade(): # Done; return the response. return response + def save_snapshot( + self, + request: Optional[Union[environments.SaveSnapshotRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates a snapshots of a Cloud Composer environment. + As a result of this operation, snapshot of environment's + state is stored in a location specified in the + SaveSnapshotRequest. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.orchestration.airflow import service_v1beta1 + + def sample_save_snapshot(): + # Create a client + client = service_v1beta1.EnvironmentsClient() + + # Initialize request argument(s) + request = service_v1beta1.SaveSnapshotRequest( + ) + + # Make the request + operation = client.save_snapshot(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.orchestration.airflow.service_v1beta1.types.SaveSnapshotRequest, dict]): + The request object. Request to create a snapshot of a + Cloud Composer environment. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.orchestration.airflow.service_v1beta1.types.SaveSnapshotResponse` + Response to SaveSnapshotRequest. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a environments.SaveSnapshotRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, environments.SaveSnapshotRequest): + request = environments.SaveSnapshotRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.save_snapshot] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("environment", request.environment),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. 
+ response = operation.from_gapic( + response, + self._transport.operations_client, + environments.SaveSnapshotResponse, + metadata_type=operations.OperationMetadata, + ) + + # Done; return the response. + return response + + def load_snapshot( + self, + request: Optional[Union[environments.LoadSnapshotRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Loads a snapshot of a Cloud Composer environment. + As a result of this operation, a snapshot of + environment's specified in LoadSnapshotRequest is loaded + into the environment. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.orchestration.airflow import service_v1beta1 + + def sample_load_snapshot(): + # Create a client + client = service_v1beta1.EnvironmentsClient() + + # Initialize request argument(s) + request = service_v1beta1.LoadSnapshotRequest( + ) + + # Make the request + operation = client.load_snapshot(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.orchestration.airflow.service_v1beta1.types.LoadSnapshotRequest, dict]): + The request object. Request to load a snapshot into a + Cloud Composer environment. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.orchestration.airflow.service_v1beta1.types.LoadSnapshotResponse` + Response to LoadSnapshotRequest. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a environments.LoadSnapshotRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, environments.LoadSnapshotRequest): + request = environments.LoadSnapshotRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.load_snapshot] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("environment", request.environment),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + environments.LoadSnapshotResponse, + metadata_type=operations.OperationMetadata, + ) + + # Done; return the response. 
+ return response + def __enter__(self): return self diff --git a/google/cloud/orchestration/airflow/service_v1beta1/services/environments/transports/base.py b/google/cloud/orchestration/airflow/service_v1beta1/services/environments/transports/base.py index fa7092c..e28de0c 100644 --- a/google/cloud/orchestration/airflow/service_v1beta1/services/environments/transports/base.py +++ b/google/cloud/orchestration/airflow/service_v1beta1/services/environments/transports/base.py @@ -163,6 +163,16 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.save_snapshot: gapic_v1.method.wrap_method( + self.save_snapshot, + default_timeout=None, + client_info=client_info, + ), + self.load_snapshot: gapic_v1.method.wrap_method( + self.load_snapshot, + default_timeout=None, + client_info=client_info, + ), } def close(self): @@ -245,6 +255,24 @@ def check_upgrade( ]: raise NotImplementedError() + @property + def save_snapshot( + self, + ) -> Callable[ + [environments.SaveSnapshotRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def load_snapshot( + self, + ) -> Callable[ + [environments.LoadSnapshotRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + @property def kind(self) -> str: raise NotImplementedError() diff --git a/google/cloud/orchestration/airflow/service_v1beta1/services/environments/transports/grpc.py b/google/cloud/orchestration/airflow/service_v1beta1/services/environments/transports/grpc.py index cef66e2..1fe5d26 100644 --- a/google/cloud/orchestration/airflow/service_v1beta1/services/environments/transports/grpc.py +++ b/google/cloud/orchestration/airflow/service_v1beta1/services/environments/transports/grpc.py @@ -431,6 +431,64 @@ def check_upgrade( ) return self._stubs["check_upgrade"] + @property + def save_snapshot( + self, + ) -> Callable[[environments.SaveSnapshotRequest], operations_pb2.Operation]: + r"""Return a callable for the save snapshot method over gRPC. + + Creates a snapshots of a Cloud Composer environment. + As a result of this operation, snapshot of environment's + state is stored in a location specified in the + SaveSnapshotRequest. + + Returns: + Callable[[~.SaveSnapshotRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "save_snapshot" not in self._stubs: + self._stubs["save_snapshot"] = self.grpc_channel.unary_unary( + "/google.cloud.orchestration.airflow.service.v1beta1.Environments/SaveSnapshot", + request_serializer=environments.SaveSnapshotRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["save_snapshot"] + + @property + def load_snapshot( + self, + ) -> Callable[[environments.LoadSnapshotRequest], operations_pb2.Operation]: + r"""Return a callable for the load snapshot method over gRPC. + + Loads a snapshot of a Cloud Composer environment. + As a result of this operation, a snapshot of + environment's specified in LoadSnapshotRequest is loaded + into the environment. + + Returns: + Callable[[~.LoadSnapshotRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. 
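# A minimal end-to-end sketch of the new snapshot RPCs through the high-level
# v1beta1 client (the transport callables above are normally not used directly).
# The project, location, environment, and bucket names are placeholders; any
# supported values can be substituted.
from google.cloud.orchestration.airflow import service_v1beta1


def snapshot_roundtrip():
    client = service_v1beta1.EnvironmentsClient()
    environment = (
        "projects/my-project/locations/us-central1/environments/my-environment"
    )

    # Save a snapshot of the environment's state to a Cloud Storage location.
    save_op = client.save_snapshot(
        request=service_v1beta1.SaveSnapshotRequest(
            environment=environment,
            snapshot_location="gs://my-bucket/snapshots",
        )
    )
    save_response = save_op.result()
    print("Snapshot stored at:", save_response.snapshot_path)

    # Load the snapshot back into the target environment, optionally skipping
    # parts of the restore.
    load_op = client.load_snapshot(
        request=service_v1beta1.LoadSnapshotRequest(
            environment=environment,
            snapshot_path=save_response.snapshot_path,
            skip_pypi_packages_installation=False,
            skip_gcs_data_copying=False,
        )
    )
    load_op.result()
    print("Snapshot loaded.")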
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "load_snapshot" not in self._stubs: + self._stubs["load_snapshot"] = self.grpc_channel.unary_unary( + "/google.cloud.orchestration.airflow.service.v1beta1.Environments/LoadSnapshot", + request_serializer=environments.LoadSnapshotRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["load_snapshot"] + def close(self): self.grpc_channel.close() diff --git a/google/cloud/orchestration/airflow/service_v1beta1/services/environments/transports/grpc_asyncio.py b/google/cloud/orchestration/airflow/service_v1beta1/services/environments/transports/grpc_asyncio.py index a3ac7d7..4b3a149 100644 --- a/google/cloud/orchestration/airflow/service_v1beta1/services/environments/transports/grpc_asyncio.py +++ b/google/cloud/orchestration/airflow/service_v1beta1/services/environments/transports/grpc_asyncio.py @@ -449,6 +449,68 @@ def check_upgrade( ) return self._stubs["check_upgrade"] + @property + def save_snapshot( + self, + ) -> Callable[ + [environments.SaveSnapshotRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the save snapshot method over gRPC. + + Creates a snapshots of a Cloud Composer environment. + As a result of this operation, snapshot of environment's + state is stored in a location specified in the + SaveSnapshotRequest. + + Returns: + Callable[[~.SaveSnapshotRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "save_snapshot" not in self._stubs: + self._stubs["save_snapshot"] = self.grpc_channel.unary_unary( + "/google.cloud.orchestration.airflow.service.v1beta1.Environments/SaveSnapshot", + request_serializer=environments.SaveSnapshotRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["save_snapshot"] + + @property + def load_snapshot( + self, + ) -> Callable[ + [environments.LoadSnapshotRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the load snapshot method over gRPC. + + Loads a snapshot of a Cloud Composer environment. + As a result of this operation, a snapshot of + environment's specified in LoadSnapshotRequest is loaded + into the environment. + + Returns: + Callable[[~.LoadSnapshotRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "load_snapshot" not in self._stubs: + self._stubs["load_snapshot"] = self.grpc_channel.unary_unary( + "/google.cloud.orchestration.airflow.service.v1beta1.Environments/LoadSnapshot", + request_serializer=environments.LoadSnapshotRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["load_snapshot"] + def close(self): return self.grpc_channel.close() diff --git a/google/cloud/orchestration/airflow/service_v1beta1/types/__init__.py b/google/cloud/orchestration/airflow/service_v1beta1/types/__init__.py index 6492ba4..d23643d 100644 --- a/google/cloud/orchestration/airflow/service_v1beta1/types/__init__.py +++ b/google/cloud/orchestration/airflow/service_v1beta1/types/__init__.py @@ -16,21 +16,32 @@ from .environments import ( CheckUpgradeRequest, CheckUpgradeResponse, + CloudDataLineageIntegration, CreateEnvironmentRequest, DatabaseConfig, DeleteEnvironmentRequest, EncryptionConfig, Environment, EnvironmentConfig, + ExecuteAirflowCommandResponse, GetEnvironmentRequest, IPAllocationPolicy, ListEnvironmentsRequest, ListEnvironmentsResponse, + LoadSnapshotRequest, + LoadSnapshotResponse, MaintenanceWindow, + MasterAuthorizedNetworksConfig, + NetworkingConfig, NodeConfig, + PollAirflowCommandResponse, PrivateClusterConfig, PrivateEnvironmentConfig, + RecoveryConfig, RestartWebServerRequest, + SaveSnapshotRequest, + SaveSnapshotResponse, + ScheduledSnapshotsConfig, SoftwareConfig, UpdateEnvironmentRequest, WebServerConfig, @@ -47,21 +58,32 @@ __all__ = ( "CheckUpgradeRequest", "CheckUpgradeResponse", + "CloudDataLineageIntegration", "CreateEnvironmentRequest", "DatabaseConfig", "DeleteEnvironmentRequest", "EncryptionConfig", "Environment", "EnvironmentConfig", + "ExecuteAirflowCommandResponse", "GetEnvironmentRequest", "IPAllocationPolicy", "ListEnvironmentsRequest", "ListEnvironmentsResponse", + "LoadSnapshotRequest", + "LoadSnapshotResponse", "MaintenanceWindow", + "MasterAuthorizedNetworksConfig", + "NetworkingConfig", "NodeConfig", + "PollAirflowCommandResponse", "PrivateClusterConfig", "PrivateEnvironmentConfig", + "RecoveryConfig", "RestartWebServerRequest", + "SaveSnapshotRequest", + "SaveSnapshotResponse", + "ScheduledSnapshotsConfig", "SoftwareConfig", "UpdateEnvironmentRequest", "WebServerConfig", diff --git a/google/cloud/orchestration/airflow/service_v1beta1/types/environments.py b/google/cloud/orchestration/airflow/service_v1beta1/types/environments.py index c5f3293..fadb9ad 100644 --- a/google/cloud/orchestration/airflow/service_v1beta1/types/environments.py +++ b/google/cloud/orchestration/airflow/service_v1beta1/types/environments.py @@ -29,18 +29,29 @@ "DeleteEnvironmentRequest", "UpdateEnvironmentRequest", "RestartWebServerRequest", + "ExecuteAirflowCommandResponse", + "PollAirflowCommandResponse", + "SaveSnapshotRequest", + "SaveSnapshotResponse", + "LoadSnapshotRequest", + "LoadSnapshotResponse", "EnvironmentConfig", "WebServerNetworkAccessControl", "SoftwareConfig", "IPAllocationPolicy", "NodeConfig", "PrivateClusterConfig", + "NetworkingConfig", "PrivateEnvironmentConfig", "DatabaseConfig", "WebServerConfig", "EncryptionConfig", "MaintenanceWindow", "WorkloadsConfig", + "RecoveryConfig", + "ScheduledSnapshotsConfig", + "MasterAuthorizedNetworksConfig", + "CloudDataLineageIntegration", "Environment", "CheckUpgradeRequest", "CheckUpgradeResponse", @@ -279,8 +290,12 @@ class UpdateEnvironmentRequest(proto.Message): - Horizontally scale the number of nodes in the environment. 
An integer greater than or equal to 3 - must be provided in the ``config.nodeCount`` field. \* - ``config.webServerNetworkAccessControl`` + must be provided in the ``config.nodeCount`` field. + Supported for Cloud Composer environments in versions + composer-1.\ *.*-airflow-*.*.*. + + - ``config.webServerNetworkAccessControl`` + - Replace the environment's current WebServerNetworkAccessControl. @@ -308,20 +323,18 @@ class UpdateEnvironmentRequest(proto.Message): - Replace all environment variables. If a replacement environment variable map is not included in ``environment``, all custom environment variables are - cleared. It is an error to provide both this mask and - a mask specifying one or more individual environment - variables. + cleared. - ``config.softwareConfig.imageVersion`` - Upgrade the version of the environment in-place. Refer to ``SoftwareConfig.image_version`` for information on how to format the new image version. Additionally, the - new image version cannot effect a version downgrade + new image version cannot effect a version downgrade, and must match the current image version's Composer - major version and Airflow major and minor versions. - Consult the `Cloud Composer Version - List `__ + and Airflow major versions. Consult the `Cloud + Composer version + list `__ for valid values. - ``config.softwareConfig.schedulerCount`` @@ -329,18 +342,47 @@ class UpdateEnvironmentRequest(proto.Message): - Horizontally scale the number of schedulers in Airflow. A positive integer not greater than the number of nodes must be provided in the - ``config.softwareConfig.schedulerCount`` field. \* - ``config.databaseConfig.machineType`` + ``config.softwareConfig.schedulerCount`` field. + Supported for Cloud Composer environments in versions + composer-1.\ *.*-airflow-2.*.*. + + - ``config.softwareConfig.cloudDataLineageIntegration`` + + - Configuration for Cloud Data Lineage integration. + + - ``config.databaseConfig.machineType`` + - Cloud SQL machine type used by Airflow database. It has to be one of: db-n1-standard-2, db-n1-standard-4, - db-n1-standard-8 or db-n1-standard-16. \* - ``config.webServerConfig.machineType`` + db-n1-standard-8 or db-n1-standard-16. Supported for + Cloud Composer environments in versions + composer-1.\ *.*-airflow-*.*.*. + + - ``config.webServerConfig.machineType`` + - Machine type on which Airflow web server is running. It has to be one of: composer-n1-webserver-2, - composer-n1-webserver-4 or composer-n1-webserver-8. \* - ``config.maintenanceWindow`` + composer-n1-webserver-4 or composer-n1-webserver-8. + Supported for Cloud Composer environments in versions + composer-1.\ *.*-airflow-*.*.*. + + - ``config.maintenanceWindow`` + - Maintenance window during which Cloud Composer components may be under maintenance. + + - ``config.workloadsConfig`` + + - The workloads configuration settings for the GKE + cluster associated with the Cloud Composer + environment. Supported for Cloud Composer environments + in versions composer-2.\ *.*-airflow-*.*.\* and newer. + + - ``config.environmentSize`` + + - The size of the Cloud Composer environment. Supported + for Cloud Composer environments in versions + composer-2.\ *.*-airflow-*.*.\* and newer. """ name: str = proto.Field( @@ -375,6 +417,207 @@ class RestartWebServerRequest(proto.Message): ) +class ExecuteAirflowCommandResponse(proto.Message): + r"""Response to ExecuteAirflowCommandRequest. + + Attributes: + execution_id (str): + The unique ID of the command execution for + polling. 
+ pod (str): + The name of the pod where the command is + executed. + pod_namespace (str): + The namespace of the pod where the command is + executed. + error (str): + Error message. Empty if there was no error. + """ + + execution_id: str = proto.Field( + proto.STRING, + number=1, + ) + pod: str = proto.Field( + proto.STRING, + number=2, + ) + pod_namespace: str = proto.Field( + proto.STRING, + number=3, + ) + error: str = proto.Field( + proto.STRING, + number=4, + ) + + +class PollAirflowCommandResponse(proto.Message): + r"""Response to PollAirflowCommandRequest. + + Attributes: + output (MutableSequence[google.cloud.orchestration.airflow.service_v1beta1.types.PollAirflowCommandResponse.Line]): + Output from the command execution. It may not + contain the full output and the caller may need + to poll for more lines. + output_end (bool): + Whether the command execution has finished + and there is no more output. + exit_info (google.cloud.orchestration.airflow.service_v1beta1.types.PollAirflowCommandResponse.ExitInfo): + The result exit status of the command. + """ + + class Line(proto.Message): + r"""Contains information about a single line from logs. + + Attributes: + line_number (int): + Number of the line. + content (str): + Text content of the log line. + """ + + line_number: int = proto.Field( + proto.INT32, + number=1, + ) + content: str = proto.Field( + proto.STRING, + number=2, + ) + + class ExitInfo(proto.Message): + r"""Information about how a command ended. + + Attributes: + exit_code (int): + The exit code from the command execution. + error (str): + Error message. Empty if there was no error. + """ + + exit_code: int = proto.Field( + proto.INT32, + number=1, + ) + error: str = proto.Field( + proto.STRING, + number=2, + ) + + output: MutableSequence[Line] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=Line, + ) + output_end: bool = proto.Field( + proto.BOOL, + number=2, + ) + exit_info: ExitInfo = proto.Field( + proto.MESSAGE, + number=3, + message=ExitInfo, + ) + + +class SaveSnapshotRequest(proto.Message): + r"""Request to create a snapshot of a Cloud Composer environment. + + Attributes: + environment (str): + The resource name of the source environment + in the form: + "projects/{projectId}/locations/{locationId}/environments/{environmentId}". + snapshot_location (str): + Location in a Cloud Storage where the + snapshot is going to be stored, e.g.: + "gs://my-bucket/snapshots". + """ + + environment: str = proto.Field( + proto.STRING, + number=1, + ) + snapshot_location: str = proto.Field( + proto.STRING, + number=2, + ) + + +class SaveSnapshotResponse(proto.Message): + r"""Response to SaveSnapshotRequest. + + Attributes: + snapshot_path (str): + The fully-resolved Cloud Storage path of the created + snapshot, e.g.: + "gs://my-bucket/snapshots/project_location_environment_timestamp". + This field is populated only if the snapshot creation was + successful. + """ + + snapshot_path: str = proto.Field( + proto.STRING, + number=1, + ) + + +class LoadSnapshotRequest(proto.Message): + r"""Request to load a snapshot into a Cloud Composer environment. + + Attributes: + environment (str): + The resource name of the target environment + in the form: + "projects/{projectId}/locations/{locationId}/environments/{environmentId}". + snapshot_path (str): + A Cloud Storage path to a snapshot to load, e.g.: + "gs://my-bucket/snapshots/project_location_environment_timestamp". 
+ skip_pypi_packages_installation (bool): + Whether or not to skip installing Pypi + packages when loading the environment's state. + skip_environment_variables_setting (bool): + Whether or not to skip setting environment + variables when loading the environment's state. + skip_airflow_overrides_setting (bool): + Whether or not to skip setting Airflow + overrides when loading the environment's state. + skip_gcs_data_copying (bool): + Whether or not to skip copying Cloud Storage + data when loading the environment's state. + """ + + environment: str = proto.Field( + proto.STRING, + number=1, + ) + snapshot_path: str = proto.Field( + proto.STRING, + number=2, + ) + skip_pypi_packages_installation: bool = proto.Field( + proto.BOOL, + number=3, + ) + skip_environment_variables_setting: bool = proto.Field( + proto.BOOL, + number=4, + ) + skip_airflow_overrides_setting: bool = proto.Field( + proto.BOOL, + number=5, + ) + skip_gcs_data_copying: bool = proto.Field( + proto.BOOL, + number=6, + ) + + +class LoadSnapshotResponse(proto.Message): + r"""Response to LoadSnapshotRequest.""" + + class EnvironmentConfig(proto.Message): r"""Configuration information for an environment. @@ -391,9 +634,11 @@ class EnvironmentConfig(proto.Message): for this environment reside in a simulated directory with the given prefix. node_count (int): - The number of nodes in the Kubernetes Engine - cluster that will be used to run this - environment. + The number of nodes in the Kubernetes Engine cluster that + will be used to run this environment. + + This field is supported for Cloud Composer environments in + versions composer-1.\ *.*-airflow-*.*.*. software_config (google.cloud.orchestration.airflow.service_v1beta1.types.SoftwareConfig): The configuration settings for software inside the environment. @@ -413,8 +658,11 @@ class EnvironmentConfig(proto.Message): Cloud SQL instance used internally by Apache Airflow software. web_server_config (google.cloud.orchestration.airflow.service_v1beta1.types.WebServerConfig): - Optional. The configuration settings for the - Airflow web server App Engine instance. + Optional. The configuration settings for the Airflow web + server App Engine instance. + + This field is supported for Cloud Composer environments in + versions composer-1.\ *.*-airflow-*.*.*. airflow_uri (str): Output only. The URI of the Apache Airflow Web UI hosted within this environment (see `Airflow web @@ -454,6 +702,19 @@ class EnvironmentConfig(proto.Message): environment_size (google.cloud.orchestration.airflow.service_v1beta1.types.EnvironmentConfig.EnvironmentSize): Optional. The size of the Cloud Composer environment. + This field is supported for Cloud Composer environments in + versions composer-2.\ *.*-airflow-*.*.\* and newer. + master_authorized_networks_config (google.cloud.orchestration.airflow.service_v1beta1.types.MasterAuthorizedNetworksConfig): + Optional. The configuration options for GKE + cluster master authorized networks. By default + master authorized networks feature is: - in case + of private environment: enabled with no external + networks allowlisted. + - in case of public environment: disabled. + recovery_config (google.cloud.orchestration.airflow.service_v1beta1.types.RecoveryConfig): + Optional. The Recovery settings configuration of an + environment. + This field is supported for Cloud Composer environments in versions composer-2.\ *.*-airflow-*.*.\* and newer. 
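# A small sketch of the master authorized networks option documented above,
# assuming the new message types are exported at the package level like the
# request types used in the generated samples. The CIDR range and label are
# placeholder values.
from google.cloud.orchestration.airflow import service_v1beta1

master_networks = service_v1beta1.MasterAuthorizedNetworksConfig(
    enabled=True,
    cidr_blocks=[
        service_v1beta1.MasterAuthorizedNetworksConfig.CidrBlock(
            display_name="office",        # user-defined label for the block
            cidr_block="203.0.113.0/24",  # must be specified in CIDR notation
        )
    ],
)

# The option hangs off EnvironmentConfig.master_authorized_networks_config.
config = service_v1beta1.EnvironmentConfig(
    master_authorized_networks_config=master_networks,
)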
""" @@ -531,6 +792,16 @@ class EnvironmentSize(proto.Enum): number=16, enum=EnvironmentSize, ) + master_authorized_networks_config: "MasterAuthorizedNetworksConfig" = proto.Field( + proto.MESSAGE, + number=17, + message="MasterAuthorizedNetworksConfig", + ) + recovery_config: "RecoveryConfig" = proto.Field( + proto.MESSAGE, + number=18, + message="RecoveryConfig", + ) class WebServerNetworkAccessControl(proto.Message): @@ -588,25 +859,32 @@ class SoftwareConfig(proto.Message): encapsulates both the version of Cloud Composer functionality and the version of Apache Airflow. It must match the regular expression - ``composer-([0-9]+\.[0-9]+\.[0-9]+|latest)-airflow-[0-9]+\.[0-9]+(\.[0-9]+.*)?``. + ``composer-([0-9]+(\.[0-9]+\.[0-9]+(-preview\.[0-9]+)?)?|latest)-airflow-([0-9]+(\.[0-9]+(\.[0-9]+)?)?)``. When used as input, the server also checks if the provided version is supported and denies the request for an unsupported version. - The Cloud Composer portion of the version is a `semantic - version `__ or ``latest``. When the - patch version is omitted, the current Cloud Composer patch - version is selected. When ``latest`` is provided instead of - an explicit version number, the server replaces ``latest`` - with the current Cloud Composer version and stores that - version number in the same field. - - The portion of the image version that follows *airflow-* is - an official Apache Airflow repository `release - name `__. - - See also `Version - List `__. + The Cloud Composer portion of the image version is a full + `semantic version `__, or an alias in + the form of major version number or ``latest``. When an + alias is provided, the server replaces it with the current + Cloud Composer version that satisfies the alias. + + The Apache Airflow portion of the image version is a full + semantic version that points to one of the supported Apache + Airflow versions, or an alias in the form of only major or + major.minor versions specified. When an alias is provided, + the server replaces it with the latest Apache Airflow + version that satisfies the alias and is supported in the + given Cloud Composer version. + + In all cases, the resolved image version is stored in the + same field. + + See also `version + list `__ + and `versioning + overview `__. airflow_config_overrides (MutableMapping[str, str]): Optional. Apache Airflow configuration properties to override. @@ -660,11 +938,23 @@ class SoftwareConfig(proto.Message): - ``SQL_REGION`` - ``SQL_USER`` python_version (str): - Optional. The major version of Python used to - run the Apache Airflow scheduler, worker, and - webserver processes. - Can be set to '2' or '3'. If not specified, the - default is '3'. Cannot be updated. + Optional. The major version of Python used to run the Apache + Airflow scheduler, worker, and webserver processes. + + Can be set to '2' or '3'. If not specified, the default is + '3'. Cannot be updated. + + This field is only supported for Cloud Composer environments + in versions composer-1.\ *.*-airflow-*.*.*. Environments in + newer versions always use Python major version 3. + scheduler_count (int): + Optional. The number of schedulers for Airflow. + + This field is supported for Cloud Composer environments in + versions composer-1.\ *.*-airflow-2.*.*. + cloud_data_lineage_integration (google.cloud.orchestration.airflow.service_v1beta1.types.CloudDataLineageIntegration): + Optional. The configuration for Cloud Data + Lineage integration. 
""" image_version: str = proto.Field( @@ -690,6 +980,15 @@ class SoftwareConfig(proto.Message): proto.STRING, number=6, ) + scheduler_count: int = proto.Field( + proto.INT32, + number=7, + ) + cloud_data_lineage_integration: "CloudDataLineageIntegration" = proto.Field( + proto.MESSAGE, + number=8, + message="CloudDataLineageIntegration", + ) class IPAllocationPolicy(proto.Message): @@ -700,28 +999,35 @@ class IPAllocationPolicy(proto.Message): use_ip_aliases (bool): Optional. Whether or not to enable Alias IPs in the GKE cluster. If ``true``, a VPC-native cluster is created. + + This field is only supported for Cloud Composer environments + in versions composer-1.\ *.*-airflow-*.*.*. Environments in + newer versions always use VPC-native GKE clusters. cluster_secondary_range_name (str): Optional. The name of the cluster's secondary range used to allocate IP addresses to pods. Specify either ``cluster_secondary_range_name`` or ``cluster_ipv4_cidr_block`` but not both. - This field is applicable only when ``use_ip_aliases`` is - true. + For Cloud Composer environments in versions + composer-1.\ *.*-airflow-*.*.*, this field is applicable + only when ``use_ip_aliases`` is true. services_secondary_range_name (str): Optional. The name of the services' secondary range used to allocate IP addresses to the cluster. Specify either ``services_secondary_range_name`` or ``services_ipv4_cidr_block`` but not both. - This field is applicable only when ``use_ip_aliases`` is - true. + For Cloud Composer environments in versions + composer-1.\ *.*-airflow-*.*.*, this field is applicable + only when ``use_ip_aliases`` is true. cluster_ipv4_cidr_block (str): Optional. The IP address range used to allocate IP addresses to pods in the cluster. - This field is applicable only when ``use_ip_aliases`` is - true. + For Cloud Composer environments in versions + composer-1.\ *.*-airflow-*.*.*, this field is applicable + only when ``use_ip_aliases`` is true. Set to blank to have GKE choose a range with the default size. @@ -730,7 +1036,7 @@ class IPAllocationPolicy(proto.Message): with a specific netmask. Set to a - `CIDR `__ + `CIDR `__ notation (e.g. ``10.96.0.0/14``) from the RFC-1918 private networks (e.g. ``10.0.0.0/8``, ``172.16.0.0/12``, ``192.168.0.0/16``) to pick a specific range to use. Specify @@ -740,8 +1046,9 @@ class IPAllocationPolicy(proto.Message): Optional. The IP address range of the services IP addresses in this cluster. - This field is applicable only when ``use_ip_aliases`` is - true. + For Cloud Composer environments in versions + composer-1.\ *.*-airflow-*.*.*, this field is applicable + only when ``use_ip_aliases`` is true. Set to blank to have GKE choose a range with the default size. @@ -750,7 +1057,7 @@ class IPAllocationPolicy(proto.Message): with a specific netmask. Set to a - `CIDR `__ + `CIDR `__ notation (e.g. ``10.96.0.0/14``) from the RFC-1918 private networks (e.g. ``10.0.0.0/8``, ``172.16.0.0/12``, ``192.168.0.0/16``) to pick a specific range to use. Specify @@ -803,6 +1110,9 @@ class NodeConfig(proto.Message): one field (``location`` or ``nodeConfig.machineType``) is specified, the location information from the specified field will be propagated to the unspecified field. + + This field is supported for Cloud Composer environments in + versions composer-1.\ *.*-airflow-*.*.*. machine_type (str): Optional. 
The Compute Engine `machine type `__ used for cluster @@ -827,6 +1137,9 @@ class NodeConfig(proto.Message): If this field is unspecified, the ``machineTypeId`` defaults to "n1-standard-1". + + This field is supported for Cloud Composer environments in + versions composer-1.\ *.*-airflow-*.*.*. network (str): Optional. The Compute Engine network to be used for machine communications, specified as a `relative resource @@ -851,14 +1164,20 @@ class NodeConfig(proto.Message): also be provided, and the subnetwork must belong to the enclosing environment's project and location. disk_size_gb (int): - Optional. The disk size in GB used for node - VMs. Minimum size is 20GB. If unspecified, - defaults to 100GB. Cannot be updated. + Optional. The disk size in GB used for node VMs. Minimum + size is 30GB. If unspecified, defaults to 100GB. Cannot be + updated. + + This field is supported for Cloud Composer environments in + versions composer-1.\ *.*-airflow-*.*.*. oauth_scopes (MutableSequence[str]): Optional. The set of Google API scopes to be made available on all node VMs. If ``oauth_scopes`` is empty, defaults to ["https://www.googleapis.com/auth/cloud-platform"]. Cannot be updated. + + This field is supported for Cloud Composer environments in + versions composer-1.\ *.*-airflow-*.*.*. service_account (str): Optional. The Google Cloud Platform Service Account to be used by the workloads. If a @@ -871,6 +1190,9 @@ class NodeConfig(proto.Message): network firewalls. Each tag within the list must comply with `RFC1035 `__. Cannot be updated. + + This field is supported for Cloud Composer environments in + versions composer-1.\ *.*-airflow-*.*.*. ip_allocation_policy (google.cloud.orchestration.airflow.service_v1beta1.types.IPAllocationPolicy): Optional. The IPAllocationPolicy fields for the GKE cluster. @@ -886,6 +1208,17 @@ class NodeConfig(proto.Message): more information, see [Optimizing IP address allocation] (https://cloud.google.com/kubernetes-engine/docs/how-to/flexible-pod-cidr). Cannot be updated. + + This field is supported for Cloud Composer environments in + versions composer-1.\ *.*-airflow-*.*.*. + enable_ip_masq_agent (bool): + Optional. Deploys 'ip-masq-agent' daemon set + in the GKE cluster and defines + nonMasqueradeCIDRs equals to pod IP range so IP + masquerading is used for all destination + addresses, except between pods traffic. + See: + https://cloud.google.com/kubernetes-engine/docs/how-to/ip-masquerade-agent """ location: str = proto.Field( @@ -929,6 +1262,10 @@ class NodeConfig(proto.Message): proto.INT32, number=10, ) + enable_ip_masq_agent: bool = proto.Field( + proto.BOOL, + number=11, + ) class PrivateClusterConfig(proto.Message): @@ -968,6 +1305,34 @@ class PrivateClusterConfig(proto.Message): ) +class NetworkingConfig(proto.Message): + r"""Configuration options for networking connections in the + Composer 2 environment. + + Attributes: + connection_type (google.cloud.orchestration.airflow.service_v1beta1.types.NetworkingConfig.ConnectionType): + Optional. Indicates the user requested + specifc connection type between Tenant and + Customer projects. You cannot set networking + connection type in public IP environment. + """ + + class ConnectionType(proto.Enum): + r"""Represents connection type between Composer environment in + Customer Project and the corresponding Tenant project, from a + predefined list of available connection modes. 
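# A hedged sketch of selecting Private Service Connect for a private environment
# via the NetworkingConfig introduced here. The subnetwork path is a placeholder.
from google.cloud.orchestration.airflow import service_v1beta1

private_config = service_v1beta1.PrivateEnvironmentConfig(
    enable_private_environment=True,
    networking_config=service_v1beta1.NetworkingConfig(
        connection_type=service_v1beta1.NetworkingConfig.ConnectionType.PRIVATE_SERVICE_CONNECT,
    ),
    # The PSC endpoint in the customer project takes an IP from this subnetwork.
    cloud_composer_connection_subnetwork=(
        "projects/my-project/regions/us-central1/subnetworks/composer-psc"
    ),
)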
+ """ + CONNECTION_TYPE_UNSPECIFIED = 0 + VPC_PEERING = 1 + PRIVATE_SERVICE_CONNECT = 2 + + connection_type: ConnectionType = proto.Field( + proto.ENUM, + number=1, + enum=ConnectionType, + ) + + class PrivateEnvironmentConfig(proto.Message): r"""The configuration information for configuring a Private IP Cloud Composer environment. @@ -976,7 +1341,9 @@ class PrivateEnvironmentConfig(proto.Message): enable_private_environment (bool): Optional. If ``true``, a Private IP Cloud Composer environment is created. If this field is set to true, - ``IPAllocationPolicy.use_ip_aliases`` must be set to true . + ``IPAllocationPolicy.use_ip_aliases`` must be set to true + for Cloud Composer environments in versions + composer-1.\ *.*-airflow-*.*.*. private_cluster_config (google.cloud.orchestration.airflow.service_v1beta1.types.PrivateClusterConfig): Optional. Configuration for the private GKE cluster for a Private IP Cloud Composer @@ -986,13 +1353,19 @@ class PrivateEnvironmentConfig(proto.Message): will be reserved. Needs to be disjoint from private_cluster_config.master_ipv4_cidr_block and cloud_sql_ipv4_cidr_block. + + This field is supported for Cloud Composer environments in + versions composer-1.\ *.*-airflow-*.*.*. cloud_sql_ipv4_cidr_block (str): Optional. The CIDR block from which IP range in tenant project will be reserved for Cloud SQL. Needs to be disjoint from web_server_ipv4_cidr_block web_server_ipv4_reserved_range (str): - Output only. The IP range reserved for the - tenant project's App Engine VMs. + Output only. The IP range reserved for the tenant project's + App Engine VMs. + + This field is supported for Cloud Composer environments in + versions composer-1.\ *.*-airflow-*.*.*. cloud_composer_network_ipv4_cidr_block (str): Optional. The CIDR block from which IP range for Cloud Composer Network in tenant project will be reserved. Needs @@ -1008,6 +1381,21 @@ class PrivateEnvironmentConfig(proto.Message): This field is supported for Cloud Composer environments in versions composer-2.\ *.*-airflow-*.*.\* and newer. + enable_privately_used_public_ips (bool): + Optional. When enabled, IPs from public (non-RFC1918) ranges + can be used for + ``IPAllocationPolicy.cluster_ipv4_cidr_block`` and + ``IPAllocationPolicy.service_ipv4_cidr_block``. + cloud_composer_connection_subnetwork (str): + Optional. When specified, the environment + will use Private Service Connect instead of VPC + peerings to connect to Cloud SQL in the Tenant + Project, and the PSC endpoint in the Customer + Project will use an IP address from this + subnetwork. + networking_config (google.cloud.orchestration.airflow.service_v1beta1.types.NetworkingConfig): + Optional. Configuration for the network + connections configuration in the environment. """ enable_private_environment: bool = proto.Field( @@ -1039,6 +1427,19 @@ class PrivateEnvironmentConfig(proto.Message): proto.STRING, number=8, ) + enable_privately_used_public_ips: bool = proto.Field( + proto.BOOL, + number=6, + ) + cloud_composer_connection_subnetwork: str = proto.Field( + proto.STRING, + number=9, + ) + networking_config: "NetworkingConfig" = proto.Field( + proto.MESSAGE, + number=10, + message="NetworkingConfig", + ) class DatabaseConfig(proto.Message): @@ -1047,11 +1448,11 @@ class DatabaseConfig(proto.Message): Attributes: machine_type (str): - Optional. Cloud SQL machine type used by - Airflow database. It has to be one of: - db-n1-standard-2, db-n1-standard-4, - db-n1-standard-8 or db-n1-standard-16. If not - specified, db-n1-standard-2 will be used. 
+ Optional. Cloud SQL machine type used by Airflow database. + It has to be one of: db-n1-standard-2, db-n1-standard-4, + db-n1-standard-8 or db-n1-standard-16. If not specified, + db-n1-standard-2 will be used. Supported for Cloud Composer + environments in versions composer-1.\ *.*-airflow-*.*.*. """ machine_type: str = proto.Field( @@ -1061,8 +1462,9 @@ class DatabaseConfig(proto.Message): class WebServerConfig(proto.Message): - r"""The configuration settings for the Airflow web server App - Engine instance. + r"""The configuration settings for the Airflow web server App Engine + instance. Supported for Cloud Composer environments in versions + composer-1.\ *.*-airflow-*.*.*. Attributes: machine_type (str): @@ -1083,8 +1485,9 @@ class WebServerConfig(proto.Message): class EncryptionConfig(proto.Message): - r"""The encryption options for the Cloud Composer environment and - its dependencies. + r"""The encryption options for the Cloud Composer environment and its + dependencies. Supported for Cloud Composer environments in versions + composer-1.\ *.*-airflow-*.*.*. Attributes: kms_key_name (str): @@ -1162,6 +1565,9 @@ class WorkloadsConfig(proto.Message): server. worker (google.cloud.orchestration.airflow.service_v1beta1.types.WorkloadsConfig.WorkerResource): Optional. Resources used by Airflow workers. + triggerer (google.cloud.orchestration.airflow.service_v1beta1.types.WorkloadsConfig.TriggererResource): + Optional. Resources used by Airflow + triggerers. """ class SchedulerResource(proto.Message): @@ -1268,6 +1674,33 @@ class WorkerResource(proto.Message): number=5, ) + class TriggererResource(proto.Message): + r"""Configuration for resources used by Airflow triggerers. + + Attributes: + count (int): + Optional. The number of triggerers. + cpu (float): + Optional. CPU request and limit for a single + Airflow triggerer replica. + memory_gb (float): + Optional. Memory (GB) request and limit for a + single Airflow triggerer replica. + """ + + count: int = proto.Field( + proto.INT32, + number=1, + ) + cpu: float = proto.Field( + proto.FLOAT, + number=2, + ) + memory_gb: float = proto.Field( + proto.FLOAT, + number=3, + ) + scheduler: SchedulerResource = proto.Field( proto.MESSAGE, number=1, @@ -1283,6 +1716,128 @@ class WorkerResource(proto.Message): number=3, message=WorkerResource, ) + triggerer: TriggererResource = proto.Field( + proto.MESSAGE, + number=4, + message=TriggererResource, + ) + + +class RecoveryConfig(proto.Message): + r"""The Recovery settings of an environment. + + Attributes: + scheduled_snapshots_config (google.cloud.orchestration.airflow.service_v1beta1.types.ScheduledSnapshotsConfig): + Optional. The configuration for scheduled + snapshot creation mechanism. + """ + + scheduled_snapshots_config: "ScheduledSnapshotsConfig" = proto.Field( + proto.MESSAGE, + number=1, + message="ScheduledSnapshotsConfig", + ) + + +class ScheduledSnapshotsConfig(proto.Message): + r"""The configuration for scheduled snapshot creation mechanism. + + Attributes: + enabled (bool): + Optional. Whether scheduled snapshots + creation is enabled. + snapshot_location (str): + Optional. The Cloud Storage location for + storing automatically created snapshots. + snapshot_creation_schedule (str): + Optional. The cron expression representing + the time when snapshots creation mechanism runs. + This field is subject to additional validation + around frequency of execution. + time_zone (str): + Optional. Time zone that sets the context to interpret + snapshot_creation_schedule. 
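# A minimal sketch of enabling scheduled snapshots through the RecoveryConfig
# defined above. The bucket, cron expression, and time zone are placeholder
# values; the service applies additional validation on schedule frequency.
from google.cloud.orchestration.airflow import service_v1beta1

recovery_config = service_v1beta1.RecoveryConfig(
    scheduled_snapshots_config=service_v1beta1.ScheduledSnapshotsConfig(
        enabled=True,
        snapshot_location="gs://my-bucket/snapshots",
        snapshot_creation_schedule="0 4 * * *",
        time_zone="UTC",
    ),
)

# The recovery settings hang off EnvironmentConfig.recovery_config.
config = service_v1beta1.EnvironmentConfig(recovery_config=recovery_config)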
+ """ + + enabled: bool = proto.Field( + proto.BOOL, + number=1, + ) + snapshot_location: str = proto.Field( + proto.STRING, + number=6, + ) + snapshot_creation_schedule: str = proto.Field( + proto.STRING, + number=3, + ) + time_zone: str = proto.Field( + proto.STRING, + number=5, + ) + + +class MasterAuthorizedNetworksConfig(proto.Message): + r"""Configuration options for the master authorized networks + feature. Enabled master authorized networks will disallow all + external traffic to access Kubernetes master through HTTPS + except traffic from the given CIDR blocks, Google Compute Engine + Public IPs and Google Prod IPs. + + Attributes: + enabled (bool): + Whether or not master authorized networks + feature is enabled. + cidr_blocks (MutableSequence[google.cloud.orchestration.airflow.service_v1beta1.types.MasterAuthorizedNetworksConfig.CidrBlock]): + Up to 50 external networks that could access + Kubernetes master through HTTPS. + """ + + class CidrBlock(proto.Message): + r"""CIDR block with an optional name. + + Attributes: + display_name (str): + User-defined name that identifies the CIDR + block. + cidr_block (str): + CIDR block that must be specified in CIDR + notation. + """ + + display_name: str = proto.Field( + proto.STRING, + number=1, + ) + cidr_block: str = proto.Field( + proto.STRING, + number=2, + ) + + enabled: bool = proto.Field( + proto.BOOL, + number=1, + ) + cidr_blocks: MutableSequence[CidrBlock] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=CidrBlock, + ) + + +class CloudDataLineageIntegration(proto.Message): + r"""Configuration for Cloud Data Lineage integration. + + Attributes: + enabled (bool): + Optional. Whether or not Cloud Data Lineage + integration is enabled. + """ + + enabled: bool = proto.Field( + proto.BOOL, + number=1, + ) class Environment(proto.Message): @@ -1383,25 +1938,32 @@ class CheckUpgradeRequest(proto.Message): encapsulates both the version of Cloud Composer functionality and the version of Apache Airflow. It must match the regular expression - ``composer-([0-9]+\.[0-9]+\.[0-9]+|latest)-airflow-[0-9]+\.[0-9]+(\.[0-9]+.*)?``. + ``composer-([0-9]+(\.[0-9]+\.[0-9]+(-preview\.[0-9]+)?)?|latest)-airflow-([0-9]+(\.[0-9]+(\.[0-9]+)?)?)``. When used as input, the server also checks if the provided version is supported and denies the request for an unsupported version. - The Cloud Composer portion of the version is a `semantic - version `__ or ``latest``. When the - patch version is omitted, the current Cloud Composer patch - version is selected. When ``latest`` is provided instead of - an explicit version number, the server replaces ``latest`` - with the current Cloud Composer version and stores that - version number in the same field. - - The portion of the image version that follows ``airflow-`` - is an official Apache Airflow repository `release - name `__. - - See also [Version List] - (/composer/docs/concepts/versioning/composer-versions). + The Cloud Composer portion of the image version is a full + `semantic version `__, or an alias in + the form of major version number or ``latest``. When an + alias is provided, the server replaces it with the current + Cloud Composer version that satisfies the alias. + + The Apache Airflow portion of the image version is a full + semantic version that points to one of the supported Apache + Airflow versions, or an alias in the form of only major or + major.minor versions specified. 
When an alias is provided, + the server replaces it with the latest Apache Airflow + version that satisfies the alias and is supported in the + given Cloud Composer version. + + In all cases, the resolved image version is stored in the + same field. + + See also `version + list `__ + and `versioning + overview `__. """ environment: str = proto.Field( diff --git a/google/cloud/orchestration/airflow/service_v1beta1/types/image_versions.py b/google/cloud/orchestration/airflow/service_v1beta1/types/image_versions.py index c23f704..557e3b5 100644 --- a/google/cloud/orchestration/airflow/service_v1beta1/types/image_versions.py +++ b/google/cloud/orchestration/airflow/service_v1beta1/types/image_versions.py @@ -97,7 +97,7 @@ class ImageVersion(proto.Message): Attributes: image_version_id (str): The string identifier of the ImageVersion, in - the form: "composer-x.y.z-airflow-a.b(.c)". + the form: "composer-x.y.z-airflow-a.b.c". is_default (bool): Whether this is the default ImageVersion used by Composer during environment creation if no diff --git a/google/cloud/orchestration/airflow/service_v1beta1/types/operations.py b/google/cloud/orchestration/airflow/service_v1beta1/types/operations.py index 2f1bd5c..269b715 100644 --- a/google/cloud/orchestration/airflow/service_v1beta1/types/operations.py +++ b/google/cloud/orchestration/airflow/service_v1beta1/types/operations.py @@ -67,6 +67,8 @@ class Type(proto.Enum): DELETE = 2 UPDATE = 3 CHECK = 4 + SAVE_SNAPSHOT = 5 + LOAD_SNAPSHOT = 6 state: State = proto.Field( proto.ENUM, diff --git a/samples/generated_samples/composer_v1_generated_environments_load_snapshot_async.py b/samples/generated_samples/composer_v1_generated_environments_load_snapshot_async.py new file mode 100644 index 0000000..b7776e0 --- /dev/null +++ b/samples/generated_samples/composer_v1_generated_environments_load_snapshot_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for LoadSnapshot +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-orchestration-airflow + + +# [START composer_v1_generated_Environments_LoadSnapshot_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.orchestration.airflow import service_v1 + + +async def sample_load_snapshot(): + # Create a client + client = service_v1.EnvironmentsAsyncClient() + + # Initialize request argument(s) + request = service_v1.LoadSnapshotRequest( + ) + + # Make the request + operation = client.load_snapshot(request=request) + + print("Waiting for operation to complete...") + + response = await operation.result() + + # Handle the response + print(response) + +# [END composer_v1_generated_Environments_LoadSnapshot_async] diff --git a/samples/generated_samples/composer_v1_generated_environments_load_snapshot_sync.py b/samples/generated_samples/composer_v1_generated_environments_load_snapshot_sync.py new file mode 100644 index 0000000..9d32838 --- /dev/null +++ b/samples/generated_samples/composer_v1_generated_environments_load_snapshot_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for LoadSnapshot +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-orchestration-airflow + + +# [START composer_v1_generated_Environments_LoadSnapshot_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.orchestration.airflow import service_v1 + + +def sample_load_snapshot(): + # Create a client + client = service_v1.EnvironmentsClient() + + # Initialize request argument(s) + request = service_v1.LoadSnapshotRequest( + ) + + # Make the request + operation = client.load_snapshot(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END composer_v1_generated_Environments_LoadSnapshot_sync] diff --git a/samples/generated_samples/composer_v1_generated_environments_save_snapshot_async.py b/samples/generated_samples/composer_v1_generated_environments_save_snapshot_async.py new file mode 100644 index 0000000..ec387fe --- /dev/null +++ b/samples/generated_samples/composer_v1_generated_environments_save_snapshot_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SaveSnapshot +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-orchestration-airflow + + +# [START composer_v1_generated_Environments_SaveSnapshot_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.orchestration.airflow import service_v1 + + +async def sample_save_snapshot(): + # Create a client + client = service_v1.EnvironmentsAsyncClient() + + # Initialize request argument(s) + request = service_v1.SaveSnapshotRequest( + ) + + # Make the request + operation = client.save_snapshot(request=request) + + print("Waiting for operation to complete...") + + response = await operation.result() + + # Handle the response + print(response) + +# [END composer_v1_generated_Environments_SaveSnapshot_async] diff --git a/samples/generated_samples/composer_v1_generated_environments_save_snapshot_sync.py b/samples/generated_samples/composer_v1_generated_environments_save_snapshot_sync.py new file mode 100644 index 0000000..09882bf --- /dev/null +++ b/samples/generated_samples/composer_v1_generated_environments_save_snapshot_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SaveSnapshot +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-orchestration-airflow + + +# [START composer_v1_generated_Environments_SaveSnapshot_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.orchestration.airflow import service_v1 + + +def sample_save_snapshot(): + # Create a client + client = service_v1.EnvironmentsClient() + + # Initialize request argument(s) + request = service_v1.SaveSnapshotRequest( + ) + + # Make the request + operation = client.save_snapshot(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END composer_v1_generated_Environments_SaveSnapshot_sync] diff --git a/samples/generated_samples/composer_v1beta1_generated_environments_load_snapshot_async.py b/samples/generated_samples/composer_v1beta1_generated_environments_load_snapshot_async.py new file mode 100644 index 0000000..fbd0dae --- /dev/null +++ b/samples/generated_samples/composer_v1beta1_generated_environments_load_snapshot_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for LoadSnapshot +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-orchestration-airflow-service + + +# [START composer_v1beta1_generated_Environments_LoadSnapshot_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
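The generated v1 snapshot samples above intentionally send empty requests; a real call supplies the environment name and snapshot fields. The accepted keyword fields appear in the scripts/fixup_service_v1_keywords.py change later in this patch (environment and snapshot_location for SaveSnapshot; environment, snapshot_path, and the skip_* flags for LoadSnapshot). A minimal round-trip sketch under those assumptions; all resource names and Cloud Storage paths are placeholders:

from google.cloud.orchestration.airflow import service_v1


def snapshot_round_trip():
    client = service_v1.EnvironmentsClient()
    env = "projects/my-project/locations/us-central1/environments/my-env"  # placeholder

    # Save a snapshot of the environment to a Cloud Storage location.
    save_op = client.save_snapshot(
        request=service_v1.SaveSnapshotRequest(
            environment=env,
            snapshot_location="gs://my-bucket/snapshots",  # placeholder
        )
    )
    save_op.result()

    # Restore the environment from a previously saved snapshot.
    load_op = client.load_snapshot(
        request=service_v1.LoadSnapshotRequest(
            environment=env,
            snapshot_path="gs://my-bucket/snapshots/my-snapshot",  # placeholder
            skip_pypi_packages_installation=False,
        )
    )
    print(load_op.result())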
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.orchestration.airflow import service_v1beta1 + + +async def sample_load_snapshot(): + # Create a client + client = service_v1beta1.EnvironmentsAsyncClient() + + # Initialize request argument(s) + request = service_v1beta1.LoadSnapshotRequest( + ) + + # Make the request + operation = client.load_snapshot(request=request) + + print("Waiting for operation to complete...") + + response = await operation.result() + + # Handle the response + print(response) + +# [END composer_v1beta1_generated_Environments_LoadSnapshot_async] diff --git a/samples/generated_samples/composer_v1beta1_generated_environments_load_snapshot_sync.py b/samples/generated_samples/composer_v1beta1_generated_environments_load_snapshot_sync.py new file mode 100644 index 0000000..6424bbd --- /dev/null +++ b/samples/generated_samples/composer_v1beta1_generated_environments_load_snapshot_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for LoadSnapshot +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-orchestration-airflow-service + + +# [START composer_v1beta1_generated_Environments_LoadSnapshot_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.orchestration.airflow import service_v1beta1 + + +def sample_load_snapshot(): + # Create a client + client = service_v1beta1.EnvironmentsClient() + + # Initialize request argument(s) + request = service_v1beta1.LoadSnapshotRequest( + ) + + # Make the request + operation = client.load_snapshot(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END composer_v1beta1_generated_Environments_LoadSnapshot_sync] diff --git a/samples/generated_samples/composer_v1beta1_generated_environments_save_snapshot_async.py b/samples/generated_samples/composer_v1beta1_generated_environments_save_snapshot_async.py new file mode 100644 index 0000000..de0f02a --- /dev/null +++ b/samples/generated_samples/composer_v1beta1_generated_environments_save_snapshot_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SaveSnapshot +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-orchestration-airflow-service + + +# [START composer_v1beta1_generated_Environments_SaveSnapshot_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.orchestration.airflow import service_v1beta1 + + +async def sample_save_snapshot(): + # Create a client + client = service_v1beta1.EnvironmentsAsyncClient() + + # Initialize request argument(s) + request = service_v1beta1.SaveSnapshotRequest( + ) + + # Make the request + operation = client.save_snapshot(request=request) + + print("Waiting for operation to complete...") + + response = await operation.result() + + # Handle the response + print(response) + +# [END composer_v1beta1_generated_Environments_SaveSnapshot_async] diff --git a/samples/generated_samples/composer_v1beta1_generated_environments_save_snapshot_sync.py b/samples/generated_samples/composer_v1beta1_generated_environments_save_snapshot_sync.py new file mode 100644 index 0000000..88e01f0 --- /dev/null +++ b/samples/generated_samples/composer_v1beta1_generated_environments_save_snapshot_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SaveSnapshot +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-orchestration-airflow-service + + +# [START composer_v1beta1_generated_Environments_SaveSnapshot_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.orchestration.airflow import service_v1beta1 + + +def sample_save_snapshot(): + # Create a client + client = service_v1beta1.EnvironmentsClient() + + # Initialize request argument(s) + request = service_v1beta1.SaveSnapshotRequest( + ) + + # Make the request + operation = client.save_snapshot(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END composer_v1beta1_generated_Environments_SaveSnapshot_sync] diff --git a/samples/generated_samples/snippet_metadata_google.cloud.orchestration.airflow.service.v1.json b/samples/generated_samples/snippet_metadata_google.cloud.orchestration.airflow.service.v1.json index 5ebdced..afcb961 100644 --- a/samples/generated_samples/snippet_metadata_google.cloud.orchestration.airflow.service.v1.json +++ b/samples/generated_samples/snippet_metadata_google.cloud.orchestration.airflow.service.v1.json @@ -663,6 +663,312 @@ ], "title": "composer_v1_generated_environments_list_environments_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.orchestration.airflow.service_v1.EnvironmentsAsyncClient", + "shortName": "EnvironmentsAsyncClient" + }, + "fullName": "google.cloud.orchestration.airflow.service_v1.EnvironmentsAsyncClient.load_snapshot", + "method": { + "fullName": "google.cloud.orchestration.airflow.service.v1.Environments.LoadSnapshot", + "service": { + "fullName": "google.cloud.orchestration.airflow.service.v1.Environments", + "shortName": "Environments" + }, + "shortName": "LoadSnapshot" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.orchestration.airflow.service_v1.types.LoadSnapshotRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "load_snapshot" + }, + "description": "Sample for LoadSnapshot", + "file": "composer_v1_generated_environments_load_snapshot_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "composer_v1_generated_Environments_LoadSnapshot_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "composer_v1_generated_environments_load_snapshot_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.orchestration.airflow.service_v1.EnvironmentsClient", + "shortName": "EnvironmentsClient" + }, + "fullName": "google.cloud.orchestration.airflow.service_v1.EnvironmentsClient.load_snapshot", + "method": { + "fullName": "google.cloud.orchestration.airflow.service.v1.Environments.LoadSnapshot", + "service": { + "fullName": "google.cloud.orchestration.airflow.service.v1.Environments", + "shortName": "Environments" + }, + "shortName": "LoadSnapshot" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.orchestration.airflow.service_v1.types.LoadSnapshotRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "load_snapshot" + }, + "description": "Sample for LoadSnapshot", + "file": "composer_v1_generated_environments_load_snapshot_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "composer_v1_generated_Environments_LoadSnapshot_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "composer_v1_generated_environments_load_snapshot_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.orchestration.airflow.service_v1.EnvironmentsAsyncClient", + "shortName": "EnvironmentsAsyncClient" + }, + "fullName": "google.cloud.orchestration.airflow.service_v1.EnvironmentsAsyncClient.save_snapshot", + "method": { + "fullName": "google.cloud.orchestration.airflow.service.v1.Environments.SaveSnapshot", + "service": { + "fullName": "google.cloud.orchestration.airflow.service.v1.Environments", + "shortName": "Environments" + }, + "shortName": "SaveSnapshot" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.orchestration.airflow.service_v1.types.SaveSnapshotRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "save_snapshot" + }, + "description": "Sample for SaveSnapshot", + "file": "composer_v1_generated_environments_save_snapshot_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "composer_v1_generated_Environments_SaveSnapshot_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "composer_v1_generated_environments_save_snapshot_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.orchestration.airflow.service_v1.EnvironmentsClient", + "shortName": "EnvironmentsClient" + }, + "fullName": "google.cloud.orchestration.airflow.service_v1.EnvironmentsClient.save_snapshot", + "method": { + "fullName": "google.cloud.orchestration.airflow.service.v1.Environments.SaveSnapshot", + "service": { + "fullName": "google.cloud.orchestration.airflow.service.v1.Environments", + "shortName": "Environments" + }, + "shortName": "SaveSnapshot" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.orchestration.airflow.service_v1.types.SaveSnapshotRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": 
"timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "save_snapshot" + }, + "description": "Sample for SaveSnapshot", + "file": "composer_v1_generated_environments_save_snapshot_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "composer_v1_generated_Environments_SaveSnapshot_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "composer_v1_generated_environments_save_snapshot_sync.py" + }, { "canonical": true, "clientMethod": { diff --git a/samples/generated_samples/snippet_metadata_google.cloud.orchestration.airflow.service.v1beta1.json b/samples/generated_samples/snippet_metadata_google.cloud.orchestration.airflow.service.v1beta1.json index b13cbb3..5ba2ed3 100644 --- a/samples/generated_samples/snippet_metadata_google.cloud.orchestration.airflow.service.v1beta1.json +++ b/samples/generated_samples/snippet_metadata_google.cloud.orchestration.airflow.service.v1beta1.json @@ -816,6 +816,159 @@ ], "title": "composer_v1beta1_generated_environments_list_environments_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.orchestration.airflow.service_v1beta1.EnvironmentsAsyncClient", + "shortName": "EnvironmentsAsyncClient" + }, + "fullName": "google.cloud.orchestration.airflow.service_v1beta1.EnvironmentsAsyncClient.load_snapshot", + "method": { + "fullName": "google.cloud.orchestration.airflow.service.v1beta1.Environments.LoadSnapshot", + "service": { + "fullName": "google.cloud.orchestration.airflow.service.v1beta1.Environments", + "shortName": "Environments" + }, + "shortName": "LoadSnapshot" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.orchestration.airflow.service_v1beta1.types.LoadSnapshotRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "load_snapshot" + }, + "description": "Sample for LoadSnapshot", + "file": "composer_v1beta1_generated_environments_load_snapshot_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "composer_v1beta1_generated_Environments_LoadSnapshot_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "composer_v1beta1_generated_environments_load_snapshot_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.orchestration.airflow.service_v1beta1.EnvironmentsClient", + "shortName": "EnvironmentsClient" + }, + "fullName": 
"google.cloud.orchestration.airflow.service_v1beta1.EnvironmentsClient.load_snapshot", + "method": { + "fullName": "google.cloud.orchestration.airflow.service.v1beta1.Environments.LoadSnapshot", + "service": { + "fullName": "google.cloud.orchestration.airflow.service.v1beta1.Environments", + "shortName": "Environments" + }, + "shortName": "LoadSnapshot" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.orchestration.airflow.service_v1beta1.types.LoadSnapshotRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "load_snapshot" + }, + "description": "Sample for LoadSnapshot", + "file": "composer_v1beta1_generated_environments_load_snapshot_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "composer_v1beta1_generated_Environments_LoadSnapshot_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "composer_v1beta1_generated_environments_load_snapshot_sync.py" + }, { "canonical": true, "clientMethod": { @@ -969,6 +1122,159 @@ ], "title": "composer_v1beta1_generated_environments_restart_web_server_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.orchestration.airflow.service_v1beta1.EnvironmentsAsyncClient", + "shortName": "EnvironmentsAsyncClient" + }, + "fullName": "google.cloud.orchestration.airflow.service_v1beta1.EnvironmentsAsyncClient.save_snapshot", + "method": { + "fullName": "google.cloud.orchestration.airflow.service.v1beta1.Environments.SaveSnapshot", + "service": { + "fullName": "google.cloud.orchestration.airflow.service.v1beta1.Environments", + "shortName": "Environments" + }, + "shortName": "SaveSnapshot" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.orchestration.airflow.service_v1beta1.types.SaveSnapshotRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "save_snapshot" + }, + "description": "Sample for SaveSnapshot", + "file": "composer_v1beta1_generated_environments_save_snapshot_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "composer_v1beta1_generated_Environments_SaveSnapshot_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "composer_v1beta1_generated_environments_save_snapshot_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": 
"google.cloud.orchestration.airflow.service_v1beta1.EnvironmentsClient", + "shortName": "EnvironmentsClient" + }, + "fullName": "google.cloud.orchestration.airflow.service_v1beta1.EnvironmentsClient.save_snapshot", + "method": { + "fullName": "google.cloud.orchestration.airflow.service.v1beta1.Environments.SaveSnapshot", + "service": { + "fullName": "google.cloud.orchestration.airflow.service.v1beta1.Environments", + "shortName": "Environments" + }, + "shortName": "SaveSnapshot" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.orchestration.airflow.service_v1beta1.types.SaveSnapshotRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "save_snapshot" + }, + "description": "Sample for SaveSnapshot", + "file": "composer_v1beta1_generated_environments_save_snapshot_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "composer_v1beta1_generated_Environments_SaveSnapshot_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "composer_v1beta1_generated_environments_save_snapshot_sync.py" + }, { "canonical": true, "clientMethod": { diff --git a/scripts/fixup_service_v1_keywords.py b/scripts/fixup_service_v1_keywords.py index bdacbaf..31a0db1 100644 --- a/scripts/fixup_service_v1_keywords.py +++ b/scripts/fixup_service_v1_keywords.py @@ -44,6 +44,8 @@ class serviceCallTransformer(cst.CSTTransformer): 'get_environment': ('name', ), 'list_environments': ('parent', 'page_size', 'page_token', ), 'list_image_versions': ('parent', 'page_size', 'page_token', 'include_past_releases', ), + 'load_snapshot': ('environment', 'snapshot_path', 'skip_pypi_packages_installation', 'skip_environment_variables_setting', 'skip_airflow_overrides_setting', 'skip_gcs_data_copying', ), + 'save_snapshot': ('environment', 'snapshot_location', ), 'update_environment': ('name', 'environment', 'update_mask', ), } diff --git a/scripts/fixup_service_v1beta1_keywords.py b/scripts/fixup_service_v1beta1_keywords.py index d070e52..a4ae0cb 100644 --- a/scripts/fixup_service_v1beta1_keywords.py +++ b/scripts/fixup_service_v1beta1_keywords.py @@ -45,7 +45,9 @@ class serviceCallTransformer(cst.CSTTransformer): 'get_environment': ('name', ), 'list_environments': ('parent', 'page_size', 'page_token', ), 'list_image_versions': ('parent', 'page_size', 'page_token', 'include_past_releases', ), + 'load_snapshot': ('environment', 'snapshot_path', 'skip_pypi_packages_installation', 'skip_environment_variables_setting', 'skip_airflow_overrides_setting', 'skip_gcs_data_copying', ), 'restart_web_server': ('name', ), + 'save_snapshot': ('environment', 'snapshot_location', ), 'update_environment': ('update_mask', 'name', 'environment', ), } diff --git a/tests/unit/gapic/service_v1/test_environments.py b/tests/unit/gapic/service_v1/test_environments.py index df22a3a..7926144 100644 --- a/tests/unit/gapic/service_v1/test_environments.py +++ b/tests/unit/gapic/service_v1/test_environments.py @@ -2112,6 +2112,294 @@ async def 
test_delete_environment_flattened_error_async(): ) +@pytest.mark.parametrize( + "request_type", + [ + environments.SaveSnapshotRequest, + dict, + ], +) +def test_save_snapshot(request_type, transport: str = "grpc"): + client = EnvironmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.save_snapshot), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.save_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == environments.SaveSnapshotRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_save_snapshot_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = EnvironmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.save_snapshot), "__call__") as call: + client.save_snapshot() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == environments.SaveSnapshotRequest() + + +@pytest.mark.asyncio +async def test_save_snapshot_async( + transport: str = "grpc_asyncio", request_type=environments.SaveSnapshotRequest +): + client = EnvironmentsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.save_snapshot), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.save_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == environments.SaveSnapshotRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_save_snapshot_async_from_dict(): + await test_save_snapshot_async(request_type=dict) + + +def test_save_snapshot_field_headers(): + client = EnvironmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = environments.SaveSnapshotRequest() + + request.environment = "environment_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.save_snapshot), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.save_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "environment=environment_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_save_snapshot_field_headers_async(): + client = EnvironmentsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = environments.SaveSnapshotRequest() + + request.environment = "environment_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.save_snapshot), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.save_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "environment=environment_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + environments.LoadSnapshotRequest, + dict, + ], +) +def test_load_snapshot(request_type, transport: str = "grpc"): + client = EnvironmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.load_snapshot), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.load_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == environments.LoadSnapshotRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_load_snapshot_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = EnvironmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.load_snapshot), "__call__") as call: + client.load_snapshot() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == environments.LoadSnapshotRequest() + + +@pytest.mark.asyncio +async def test_load_snapshot_async( + transport: str = "grpc_asyncio", request_type=environments.LoadSnapshotRequest +): + client = EnvironmentsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.load_snapshot), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.load_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == environments.LoadSnapshotRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_load_snapshot_async_from_dict(): + await test_load_snapshot_async(request_type=dict) + + +def test_load_snapshot_field_headers(): + client = EnvironmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = environments.LoadSnapshotRequest() + + request.environment = "environment_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.load_snapshot), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.load_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "environment=environment_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_load_snapshot_field_headers_async(): + client = EnvironmentsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = environments.LoadSnapshotRequest() + + request.environment = "environment_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.load_snapshot), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.load_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "environment=environment_value", + ) in kw["metadata"] + + def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. 
transport = transports.EnvironmentsGrpcTransport( @@ -2254,6 +2542,8 @@ def test_environments_base_transport(): "list_environments", "update_environment", "delete_environment", + "save_snapshot", + "load_snapshot", ) for method in methods: with pytest.raises(NotImplementedError): diff --git a/tests/unit/gapic/service_v1beta1/test_environments.py b/tests/unit/gapic/service_v1beta1/test_environments.py index b878086..74b036d 100644 --- a/tests/unit/gapic/service_v1beta1/test_environments.py +++ b/tests/unit/gapic/service_v1beta1/test_environments.py @@ -2413,6 +2413,294 @@ async def test_check_upgrade_field_headers_async(): ) in kw["metadata"] +@pytest.mark.parametrize( + "request_type", + [ + environments.SaveSnapshotRequest, + dict, + ], +) +def test_save_snapshot(request_type, transport: str = "grpc"): + client = EnvironmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.save_snapshot), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.save_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == environments.SaveSnapshotRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_save_snapshot_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = EnvironmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.save_snapshot), "__call__") as call: + client.save_snapshot() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == environments.SaveSnapshotRequest() + + +@pytest.mark.asyncio +async def test_save_snapshot_async( + transport: str = "grpc_asyncio", request_type=environments.SaveSnapshotRequest +): + client = EnvironmentsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.save_snapshot), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.save_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == environments.SaveSnapshotRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_save_snapshot_async_from_dict(): + await test_save_snapshot_async(request_type=dict) + + +def test_save_snapshot_field_headers(): + client = EnvironmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = environments.SaveSnapshotRequest() + + request.environment = "environment_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.save_snapshot), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.save_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "environment=environment_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_save_snapshot_field_headers_async(): + client = EnvironmentsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = environments.SaveSnapshotRequest() + + request.environment = "environment_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.save_snapshot), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.save_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "environment=environment_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + environments.LoadSnapshotRequest, + dict, + ], +) +def test_load_snapshot(request_type, transport: str = "grpc"): + client = EnvironmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.load_snapshot), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.load_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == environments.LoadSnapshotRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_load_snapshot_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = EnvironmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.load_snapshot), "__call__") as call: + client.load_snapshot() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == environments.LoadSnapshotRequest() + + +@pytest.mark.asyncio +async def test_load_snapshot_async( + transport: str = "grpc_asyncio", request_type=environments.LoadSnapshotRequest +): + client = EnvironmentsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.load_snapshot), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.load_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == environments.LoadSnapshotRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_load_snapshot_async_from_dict(): + await test_load_snapshot_async(request_type=dict) + + +def test_load_snapshot_field_headers(): + client = EnvironmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = environments.LoadSnapshotRequest() + + request.environment = "environment_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.load_snapshot), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.load_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "environment=environment_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_load_snapshot_field_headers_async(): + client = EnvironmentsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = environments.LoadSnapshotRequest() + + request.environment = "environment_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.load_snapshot), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.load_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "environment=environment_value", + ) in kw["metadata"] + + def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. transport = transports.EnvironmentsGrpcTransport( @@ -2557,6 +2845,8 @@ def test_environments_base_transport(): "delete_environment", "restart_web_server", "check_upgrade", + "save_snapshot", + "load_snapshot", ) for method in methods: with pytest.raises(NotImplementedError): From 9b1d61e4cb24023ca831e83799ccc61fc398d335 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 8 Dec 2022 10:28:00 -0500 Subject: [PATCH 5/7] fix(deps): Require google-api-core >=1.34.0, >=2.11.0 (#151) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * fix(deps): Require google-api-core >=1.34.0, >=2.11.0 fix: Drop usage of pkg_resources fix: Fix timeout default values docs(samples): Snippetgen should call await on the operation coroutine before calling result PiperOrigin-RevId: 493260409 Source-Link: https://github.com/googleapis/googleapis/commit/fea43879f83a8d0dacc9353b3f75f8f46d37162f Source-Link: https://github.com/googleapis/googleapis-gen/commit/387b7344c7529ee44be84e613b19a820508c612b Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMzg3YjczNDRjNzUyOWVlNDRiZTg0ZTYxM2IxOWE4MjA1MDhjNjEyYiJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * add gapic_version.py Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .coveragerc | 5 -- .../airflow/service_v1/gapic_version.py | 16 +++++++ .../services/environments/async_client.py | 40 ++++++++-------- .../services/environments/client.py | 30 ++++++------ .../services/environments/transports/base.py | 15 +++--- .../services/image_versions/async_client.py | 18 ++++--- .../services/image_versions/client.py | 18 ++++--- .../image_versions/transports/base.py | 15 +++--- .../airflow/service_v1beta1/gapic_version.py | 16 +++++++ .../services/environments/async_client.py | 48 +++++++++---------- .../services/environments/client.py | 34 +++++++------ .../services/environments/transports/base.py | 15 +++--- .../services/image_versions/async_client.py | 18 ++++--- .../services/image_versions/client.py | 18 ++++--- .../image_versions/transports/base.py | 15 +++--- release-please-config.json | 2 + ...d_environments_create_environment_async.py | 2 +- ...d_environments_delete_environment_async.py | 2 +- ...erated_environments_load_snapshot_async.py | 2 +- ...erated_environments_save_snapshot_async.py | 2 +- ...d_environments_update_environment_async.py | 2 +- ...erated_environments_check_upgrade_async.py | 2 +- ...d_environments_create_environment_async.py | 2 +- ...d_environments_delete_environment_async.py | 2 +- ...erated_environments_load_snapshot_async.py | 2 +- ...d_environments_restart_web_server_async.py | 2 +- ...erated_environments_save_snapshot_async.py | 2 +- ...d_environments_update_environment_async.py | 2 +- setup.py | 2 +- testing/constraints-3.7.txt | 2 +- 30 files changed, 176 insertions(+), 175 deletions(-) create mode 100644 google/cloud/orchestration/airflow/service_v1/gapic_version.py create mode 100644 google/cloud/orchestration/airflow/service_v1beta1/gapic_version.py diff --git a/.coveragerc b/.coveragerc index 69b1869..8dc8f6b 100644 --- a/.coveragerc +++ b/.coveragerc @@ -10,8 +10,3 @@ exclude_lines = pragma: NO COVER # 
Ignore debug-only repr def __repr__ - # Ignore pkg_resources exceptions. - # This is added at the module level as a safeguard for if someone - # generates the code and tries to run it without pip installing. This - # makes it virtually impossible to test properly. - except pkg_resources.DistributionNotFound diff --git a/google/cloud/orchestration/airflow/service_v1/gapic_version.py b/google/cloud/orchestration/airflow/service_v1/gapic_version.py new file mode 100644 index 0000000..25e4dd6 --- /dev/null +++ b/google/cloud/orchestration/airflow/service_v1/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "1.4.4" # {x-release-please-version} diff --git a/google/cloud/orchestration/airflow/service_v1/services/environments/async_client.py b/google/cloud/orchestration/airflow/service_v1/services/environments/async_client.py index 38b8a2e..70b84b9 100644 --- a/google/cloud/orchestration/airflow/service_v1/services/environments/async_client.py +++ b/google/cloud/orchestration/airflow/service_v1/services/environments/async_client.py @@ -34,7 +34,10 @@ from google.api_core.client_options import ClientOptions from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore -import pkg_resources + +from google.cloud.orchestration.airflow.service_v1 import ( + gapic_version as package_version, +) try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] @@ -222,7 +225,7 @@ async def create_environment( parent: Optional[str] = None, environment: Optional[environments.Environment] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: r"""Create a new environment. @@ -251,7 +254,7 @@ async def sample_create_environment(): print("Waiting for operation to complete...") - response = await operation.result() + response = (await operation).result() # Handle the response print(response) @@ -344,7 +347,7 @@ async def get_environment( *, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> environments.Environment: r"""Get an existing environment. @@ -445,7 +448,7 @@ async def list_environments( *, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListEnvironmentsAsyncPager: r"""List environments. 
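The timeout signature changes above implement the "Fix timeout default values" part of this commit: `None` is itself a meaningful value ("apply no timeout"), so using it as the default meant the configured per-RPC default timeouts could not apply when a caller simply omitted the argument; switching back to the `gapic_v1.method.DEFAULT` sentinel restores that fallback. A minimal sketch of the sentinel idea, with illustrative names rather than the library's actual method wrapper:

    from typing import Optional, Union

    DEFAULT = object()  # stand-in for the gapic_v1.method.DEFAULT sentinel

    def call_rpc(timeout: Union[float, object] = DEFAULT) -> Optional[float]:
        # Resolve the effective timeout the way a GAPIC method wrapper would:
        # the sentinel falls back to the method's configured default, while an
        # explicit None still means "no timeout at all".
        configured_default = 60.0  # hypothetical per-RPC default
        if timeout is DEFAULT:
            return configured_default
        return timeout

    assert call_rpc() == 60.0              # omitted -> configured default applies
    assert call_rpc(timeout=None) is None  # explicit None -> no timeout
    assert call_rpc(timeout=5.0) == 5.0    # explicit value wins
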
@@ -562,7 +565,7 @@ async def update_environment( environment: Optional[environments.Environment] = None, update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: r"""Update an environment. @@ -591,7 +594,7 @@ async def sample_update_environment(): print("Waiting for operation to complete...") - response = await operation.result() + response = (await operation).result() # Handle the response print(response) @@ -873,7 +876,7 @@ async def delete_environment( *, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: r"""Delete an environment. @@ -902,7 +905,7 @@ async def sample_delete_environment(): print("Waiting for operation to complete...") - response = await operation.result() + response = (await operation).result() # Handle the response print(response) @@ -995,7 +998,7 @@ async def save_snapshot( request: Optional[Union[environments.SaveSnapshotRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: r"""Creates a snapshots of a Cloud Composer environment. @@ -1027,7 +1030,7 @@ async def sample_save_snapshot(): print("Waiting for operation to complete...") - response = await operation.result() + response = (await operation).result() # Handle the response print(response) @@ -1094,7 +1097,7 @@ async def load_snapshot( request: Optional[Union[environments.LoadSnapshotRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: r"""Loads a snapshot of a Cloud Composer environment. 
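The sample change from `response = await operation.result()` to `response = (await operation).result()` is the "Snippetgen should call await on the operation coroutine before calling result" item from the commit message: the async client method returns a coroutine, so it has to be awaited before there is an operation object to ask for a result. A toy illustration with stand-in classes (not the google-api-core types):

    import asyncio

    class FakeOperation:
        """Stands in for a long-running-operation handle."""

        def result(self) -> str:
            return "done"

    class FakeAsyncClient:
        async def create_environment(self) -> FakeOperation:
            return FakeOperation()

    async def main() -> None:
        client = FakeAsyncClient()
        # client.create_environment() returns a coroutine, not the operation
        # itself; calling .result() on the un-awaited coroutine would raise
        # AttributeError, so the coroutine is awaited first.
        operation = client.create_environment()
        response = (await operation).result()
        print(response)

    asyncio.run(main())
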
@@ -1126,7 +1129,7 @@ async def sample_load_snapshot(): print("Waiting for operation to complete...") - response = await operation.result() + response = (await operation).result() # Handle the response print(response) @@ -1195,14 +1198,9 @@ async def __aexit__(self, exc_type, exc, tb): await self.transport.close() -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-orchestration-airflow", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) __all__ = ("EnvironmentsAsyncClient",) diff --git a/google/cloud/orchestration/airflow/service_v1/services/environments/client.py b/google/cloud/orchestration/airflow/service_v1/services/environments/client.py index 59cba2d..68cb690 100644 --- a/google/cloud/orchestration/airflow/service_v1/services/environments/client.py +++ b/google/cloud/orchestration/airflow/service_v1/services/environments/client.py @@ -38,7 +38,10 @@ from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.oauth2 import service_account # type: ignore -import pkg_resources + +from google.cloud.orchestration.airflow.service_v1 import ( + gapic_version as package_version, +) try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] @@ -453,7 +456,7 @@ def create_environment( parent: Optional[str] = None, environment: Optional[environments.Environment] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: r"""Create a new environment. @@ -575,7 +578,7 @@ def get_environment( *, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> environments.Environment: r"""Get an existing environment. @@ -676,7 +679,7 @@ def list_environments( *, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListEnvironmentsPager: r"""List environments. @@ -793,7 +796,7 @@ def update_environment( environment: Optional[environments.Environment] = None, update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: r"""Update an environment. @@ -1104,7 +1107,7 @@ def delete_environment( *, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: r"""Delete an environment. 
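The `DEFAULT_CLIENT_INFO` rewrite above is the "Drop usage of pkg_resources" fix: rather than querying installed-distribution metadata at import time (which raises `pkg_resources.DistributionNotFound` when the generated code is run without being pip-installed, as the removed `.coveragerc` comment described), the reported gapic version now comes from the static `gapic_version.py` module added by this commit. Roughly, assuming the package is installed:

    from google.api_core import gapic_v1
    from google.cloud.orchestration.airflow.service_v1 import (
        gapic_version as package_version,
    )

    # Same shape as the new module-level constant in the diffed clients and
    # transports; no runtime metadata lookup is involved, so nothing can fail
    # when the code is generated and run without a pip install.
    client_info = gapic_v1.client_info.ClientInfo(
        gapic_version=package_version.__version__
    )
    print(client_info.gapic_version)
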
@@ -1226,7 +1229,7 @@ def save_snapshot( request: Optional[Union[environments.SaveSnapshotRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: r"""Creates a snapshots of a Cloud Composer environment. @@ -1326,7 +1329,7 @@ def load_snapshot( request: Optional[Union[environments.LoadSnapshotRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: r"""Loads a snapshot of a Cloud Composer environment. @@ -1435,14 +1438,9 @@ def __exit__(self, type, value, traceback): self.transport.close() -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-orchestration-airflow", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) __all__ = ("EnvironmentsClient",) diff --git a/google/cloud/orchestration/airflow/service_v1/services/environments/transports/base.py b/google/cloud/orchestration/airflow/service_v1/services/environments/transports/base.py index 98f65e1..2ca7f78 100644 --- a/google/cloud/orchestration/airflow/service_v1/services/environments/transports/base.py +++ b/google/cloud/orchestration/airflow/service_v1/services/environments/transports/base.py @@ -24,18 +24,15 @@ from google.auth import credentials as ga_credentials # type: ignore from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account # type: ignore -import pkg_resources +from google.cloud.orchestration.airflow.service_v1 import ( + gapic_version as package_version, +) from google.cloud.orchestration.airflow.service_v1.types import environments -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-orchestration-airflow", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) class EnvironmentsTransport(abc.ABC): diff --git a/google/cloud/orchestration/airflow/service_v1/services/image_versions/async_client.py b/google/cloud/orchestration/airflow/service_v1/services/image_versions/async_client.py index 9143806..3cb959a 100644 --- a/google/cloud/orchestration/airflow/service_v1/services/image_versions/async_client.py +++ b/google/cloud/orchestration/airflow/service_v1/services/image_versions/async_client.py @@ -34,7 +34,10 @@ from google.api_core.client_options import ClientOptions from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore -import pkg_resources + +from google.cloud.orchestration.airflow.service_v1 import ( + gapic_version as package_version, +) try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] @@ -217,7 +220,7 @@ async def list_image_versions( *, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) 
-> pagers.ListImageVersionsAsyncPager: r"""List ImageVersions for provided location. @@ -333,14 +336,9 @@ async def __aexit__(self, exc_type, exc, tb): await self.transport.close() -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-orchestration-airflow", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) __all__ = ("ImageVersionsAsyncClient",) diff --git a/google/cloud/orchestration/airflow/service_v1/services/image_versions/client.py b/google/cloud/orchestration/airflow/service_v1/services/image_versions/client.py index 7f59bd3..746d237 100644 --- a/google/cloud/orchestration/airflow/service_v1/services/image_versions/client.py +++ b/google/cloud/orchestration/airflow/service_v1/services/image_versions/client.py @@ -38,7 +38,10 @@ from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.oauth2 import service_account # type: ignore -import pkg_resources + +from google.cloud.orchestration.airflow.service_v1 import ( + gapic_version as package_version, +) try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] @@ -422,7 +425,7 @@ def list_image_versions( *, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListImageVersionsPager: r"""List ImageVersions for provided location. @@ -545,14 +548,9 @@ def __exit__(self, type, value, traceback): self.transport.close() -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-orchestration-airflow", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) __all__ = ("ImageVersionsClient",) diff --git a/google/cloud/orchestration/airflow/service_v1/services/image_versions/transports/base.py b/google/cloud/orchestration/airflow/service_v1/services/image_versions/transports/base.py index 733dbe2..34b20e5 100644 --- a/google/cloud/orchestration/airflow/service_v1/services/image_versions/transports/base.py +++ b/google/cloud/orchestration/airflow/service_v1/services/image_versions/transports/base.py @@ -23,18 +23,15 @@ import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore -import pkg_resources +from google.cloud.orchestration.airflow.service_v1 import ( + gapic_version as package_version, +) from google.cloud.orchestration.airflow.service_v1.types import image_versions -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-orchestration-airflow", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) class ImageVersionsTransport(abc.ABC): diff --git a/google/cloud/orchestration/airflow/service_v1beta1/gapic_version.py 
b/google/cloud/orchestration/airflow/service_v1beta1/gapic_version.py new file mode 100644 index 0000000..25e4dd6 --- /dev/null +++ b/google/cloud/orchestration/airflow/service_v1beta1/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "1.4.4" # {x-release-please-version} diff --git a/google/cloud/orchestration/airflow/service_v1beta1/services/environments/async_client.py b/google/cloud/orchestration/airflow/service_v1beta1/services/environments/async_client.py index 66540ce..e25ea0d 100644 --- a/google/cloud/orchestration/airflow/service_v1beta1/services/environments/async_client.py +++ b/google/cloud/orchestration/airflow/service_v1beta1/services/environments/async_client.py @@ -34,7 +34,10 @@ from google.api_core.client_options import ClientOptions from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore -import pkg_resources + +from google.cloud.orchestration.airflow.service_v1beta1 import ( + gapic_version as package_version, +) try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] @@ -227,7 +230,7 @@ async def create_environment( parent: Optional[str] = None, environment: Optional[environments.Environment] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: r"""Create a new environment. @@ -256,7 +259,7 @@ async def sample_create_environment(): print("Waiting for operation to complete...") - response = await operation.result() + response = (await operation).result() # Handle the response print(response) @@ -349,7 +352,7 @@ async def get_environment( *, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> environments.Environment: r"""Get an existing environment. @@ -450,7 +453,7 @@ async def list_environments( *, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListEnvironmentsAsyncPager: r"""List environments. @@ -567,7 +570,7 @@ async def update_environment( environment: Optional[environments.Environment] = None, update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: r"""Update an environment. 
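The new `gapic_version.py` files carry a trailing `# {x-release-please-version}` annotation. Together with the `release-please-config.json` "extra-files" entries added further down in this commit, that annotation is what the release tooling uses to locate and rewrite the version string at release time (the 1.4.4 to 1.5.0 bumps in the final commit of this series). A rough illustration of the substitution only, not release-please's own code:

    import re

    def bump_annotated_version(text: str, new_version: str) -> str:
        # Rewrite only lines that carry the x-release-please-version marker.
        pattern = r'__version__ = "[^"]+"(\s*# {x-release-please-version})'
        return re.sub(pattern, f'__version__ = "{new_version}"\\1', text)

    line = '__version__ = "1.4.4"  # {x-release-please-version}'
    print(bump_annotated_version(line, "1.5.0"))
    # __version__ = "1.5.0"  # {x-release-please-version}
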
@@ -596,7 +599,7 @@ async def sample_update_environment(): print("Waiting for operation to complete...") - response = await operation.result() + response = (await operation).result() # Handle the response print(response) @@ -915,7 +918,7 @@ async def delete_environment( *, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: r"""Delete an environment. @@ -944,7 +947,7 @@ async def sample_delete_environment(): print("Waiting for operation to complete...") - response = await operation.result() + response = (await operation).result() # Handle the response print(response) @@ -1037,7 +1040,7 @@ async def restart_web_server( request: Optional[Union[environments.RestartWebServerRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: r"""Restart Airflow web server. @@ -1066,7 +1069,7 @@ async def sample_restart_web_server(): print("Waiting for operation to complete...") - response = await operation.result() + response = (await operation).result() # Handle the response print(response) @@ -1130,7 +1133,7 @@ async def check_upgrade( request: Optional[Union[environments.CheckUpgradeRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: r"""Check if an upgrade operation on the environment will @@ -1162,7 +1165,7 @@ async def sample_check_upgrade(): print("Waiting for operation to complete...") - response = await operation.result() + response = (await operation).result() # Handle the response print(response) @@ -1228,7 +1231,7 @@ async def save_snapshot( request: Optional[Union[environments.SaveSnapshotRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: r"""Creates a snapshots of a Cloud Composer environment. @@ -1260,7 +1263,7 @@ async def sample_save_snapshot(): print("Waiting for operation to complete...") - response = await operation.result() + response = (await operation).result() # Handle the response print(response) @@ -1327,7 +1330,7 @@ async def load_snapshot( request: Optional[Union[environments.LoadSnapshotRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: r"""Loads a snapshot of a Cloud Composer environment. 
@@ -1359,7 +1362,7 @@ async def sample_load_snapshot(): print("Waiting for operation to complete...") - response = await operation.result() + response = (await operation).result() # Handle the response print(response) @@ -1428,14 +1431,9 @@ async def __aexit__(self, exc_type, exc, tb): await self.transport.close() -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-orchestration-airflow-service", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) __all__ = ("EnvironmentsAsyncClient",) diff --git a/google/cloud/orchestration/airflow/service_v1beta1/services/environments/client.py b/google/cloud/orchestration/airflow/service_v1beta1/services/environments/client.py index 149d3ac..2ffcc35 100644 --- a/google/cloud/orchestration/airflow/service_v1beta1/services/environments/client.py +++ b/google/cloud/orchestration/airflow/service_v1beta1/services/environments/client.py @@ -38,7 +38,10 @@ from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.oauth2 import service_account # type: ignore -import pkg_resources + +from google.cloud.orchestration.airflow.service_v1beta1 import ( + gapic_version as package_version, +) try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] @@ -458,7 +461,7 @@ def create_environment( parent: Optional[str] = None, environment: Optional[environments.Environment] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: r"""Create a new environment. @@ -580,7 +583,7 @@ def get_environment( *, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> environments.Environment: r"""Get an existing environment. @@ -681,7 +684,7 @@ def list_environments( *, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListEnvironmentsPager: r"""List environments. @@ -798,7 +801,7 @@ def update_environment( environment: Optional[environments.Environment] = None, update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: r"""Update an environment. @@ -1146,7 +1149,7 @@ def delete_environment( *, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: r"""Delete an environment. 
@@ -1268,7 +1271,7 @@ def restart_web_server( request: Optional[Union[environments.RestartWebServerRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: r"""Restart Airflow web server. @@ -1362,7 +1365,7 @@ def check_upgrade( request: Optional[Union[environments.CheckUpgradeRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: r"""Check if an upgrade operation on the environment will @@ -1461,7 +1464,7 @@ def save_snapshot( request: Optional[Union[environments.SaveSnapshotRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: r"""Creates a snapshots of a Cloud Composer environment. @@ -1561,7 +1564,7 @@ def load_snapshot( request: Optional[Union[environments.LoadSnapshotRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: r"""Loads a snapshot of a Cloud Composer environment. @@ -1670,14 +1673,9 @@ def __exit__(self, type, value, traceback): self.transport.close() -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-orchestration-airflow-service", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) __all__ = ("EnvironmentsClient",) diff --git a/google/cloud/orchestration/airflow/service_v1beta1/services/environments/transports/base.py b/google/cloud/orchestration/airflow/service_v1beta1/services/environments/transports/base.py index e28de0c..f8a31db 100644 --- a/google/cloud/orchestration/airflow/service_v1beta1/services/environments/transports/base.py +++ b/google/cloud/orchestration/airflow/service_v1beta1/services/environments/transports/base.py @@ -24,18 +24,15 @@ from google.auth import credentials as ga_credentials # type: ignore from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account # type: ignore -import pkg_resources +from google.cloud.orchestration.airflow.service_v1beta1 import ( + gapic_version as package_version, +) from google.cloud.orchestration.airflow.service_v1beta1.types import environments -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-orchestration-airflow-service", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) class EnvironmentsTransport(abc.ABC): diff --git a/google/cloud/orchestration/airflow/service_v1beta1/services/image_versions/async_client.py b/google/cloud/orchestration/airflow/service_v1beta1/services/image_versions/async_client.py index 0bf4539..a95d9a0 100644 --- 
a/google/cloud/orchestration/airflow/service_v1beta1/services/image_versions/async_client.py +++ b/google/cloud/orchestration/airflow/service_v1beta1/services/image_versions/async_client.py @@ -34,7 +34,10 @@ from google.api_core.client_options import ClientOptions from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore -import pkg_resources + +from google.cloud.orchestration.airflow.service_v1beta1 import ( + gapic_version as package_version, +) try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] @@ -219,7 +222,7 @@ async def list_image_versions( *, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListImageVersionsAsyncPager: r"""List ImageVersions for provided location. @@ -335,14 +338,9 @@ async def __aexit__(self, exc_type, exc, tb): await self.transport.close() -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-orchestration-airflow-service", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) __all__ = ("ImageVersionsAsyncClient",) diff --git a/google/cloud/orchestration/airflow/service_v1beta1/services/image_versions/client.py b/google/cloud/orchestration/airflow/service_v1beta1/services/image_versions/client.py index d733c4f..7029b7f 100644 --- a/google/cloud/orchestration/airflow/service_v1beta1/services/image_versions/client.py +++ b/google/cloud/orchestration/airflow/service_v1beta1/services/image_versions/client.py @@ -38,7 +38,10 @@ from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.oauth2 import service_account # type: ignore -import pkg_resources + +from google.cloud.orchestration.airflow.service_v1beta1 import ( + gapic_version as package_version, +) try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] @@ -424,7 +427,7 @@ def list_image_versions( *, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListImageVersionsPager: r"""List ImageVersions for provided location. 
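For context on the `list_image_versions` signatures being touched here, the call is a standard paginated GAPIC method: pass the location as `parent` and iterate the returned pager. A hedged sketch against the v1beta1 surface, with a placeholder resource name:

    from google.cloud.orchestration.airflow import service_v1beta1

    def list_image_versions(parent: str) -> None:
        # parent is a placeholder, e.g. "projects/my-project/locations/us-central1".
        client = service_v1beta1.ImageVersionsClient()
        for image_version in client.list_image_versions(parent=parent):
            # Each item yielded by the pager is an ImageVersion message.
            print(image_version)
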
@@ -547,14 +550,9 @@ def __exit__(self, type, value, traceback): self.transport.close() -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-orchestration-airflow-service", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) __all__ = ("ImageVersionsClient",) diff --git a/google/cloud/orchestration/airflow/service_v1beta1/services/image_versions/transports/base.py b/google/cloud/orchestration/airflow/service_v1beta1/services/image_versions/transports/base.py index 102d5b3..ba079a5 100644 --- a/google/cloud/orchestration/airflow/service_v1beta1/services/image_versions/transports/base.py +++ b/google/cloud/orchestration/airflow/service_v1beta1/services/image_versions/transports/base.py @@ -23,18 +23,15 @@ import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore -import pkg_resources +from google.cloud.orchestration.airflow.service_v1beta1 import ( + gapic_version as package_version, +) from google.cloud.orchestration.airflow.service_v1beta1.types import image_versions -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-orchestration-airflow-service", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) class ImageVersionsTransport(abc.ABC): diff --git a/release-please-config.json b/release-please-config.json index 45c8680..c281718 100644 --- a/release-please-config.json +++ b/release-please-config.json @@ -4,6 +4,8 @@ ".": { "release-type": "python", "extra-files": [ + "google/cloud/orchestration/airflow/service_v1beta1/gapic_version.py", + "google/cloud/orchestration/airflow/service_v1/gapic_version.py", "google/cloud/orchestration/airflow/service/gapic_version.py", { "type": "json", diff --git a/samples/generated_samples/composer_v1_generated_environments_create_environment_async.py b/samples/generated_samples/composer_v1_generated_environments_create_environment_async.py index f4ea9b0..d0655bf 100644 --- a/samples/generated_samples/composer_v1_generated_environments_create_environment_async.py +++ b/samples/generated_samples/composer_v1_generated_environments_create_environment_async.py @@ -47,7 +47,7 @@ async def sample_create_environment(): print("Waiting for operation to complete...") - response = await operation.result() + response = (await operation).result() # Handle the response print(response) diff --git a/samples/generated_samples/composer_v1_generated_environments_delete_environment_async.py b/samples/generated_samples/composer_v1_generated_environments_delete_environment_async.py index 7038e83..ea715af 100644 --- a/samples/generated_samples/composer_v1_generated_environments_delete_environment_async.py +++ b/samples/generated_samples/composer_v1_generated_environments_delete_environment_async.py @@ -47,7 +47,7 @@ async def sample_delete_environment(): print("Waiting for operation to complete...") - response = await operation.result() + response = (await operation).result() # Handle the response print(response) diff --git 
a/samples/generated_samples/composer_v1_generated_environments_load_snapshot_async.py b/samples/generated_samples/composer_v1_generated_environments_load_snapshot_async.py index b7776e0..953a029 100644 --- a/samples/generated_samples/composer_v1_generated_environments_load_snapshot_async.py +++ b/samples/generated_samples/composer_v1_generated_environments_load_snapshot_async.py @@ -47,7 +47,7 @@ async def sample_load_snapshot(): print("Waiting for operation to complete...") - response = await operation.result() + response = (await operation).result() # Handle the response print(response) diff --git a/samples/generated_samples/composer_v1_generated_environments_save_snapshot_async.py b/samples/generated_samples/composer_v1_generated_environments_save_snapshot_async.py index ec387fe..c580bdd 100644 --- a/samples/generated_samples/composer_v1_generated_environments_save_snapshot_async.py +++ b/samples/generated_samples/composer_v1_generated_environments_save_snapshot_async.py @@ -47,7 +47,7 @@ async def sample_save_snapshot(): print("Waiting for operation to complete...") - response = await operation.result() + response = (await operation).result() # Handle the response print(response) diff --git a/samples/generated_samples/composer_v1_generated_environments_update_environment_async.py b/samples/generated_samples/composer_v1_generated_environments_update_environment_async.py index 33d7de9..a0cbc7e 100644 --- a/samples/generated_samples/composer_v1_generated_environments_update_environment_async.py +++ b/samples/generated_samples/composer_v1_generated_environments_update_environment_async.py @@ -47,7 +47,7 @@ async def sample_update_environment(): print("Waiting for operation to complete...") - response = await operation.result() + response = (await operation).result() # Handle the response print(response) diff --git a/samples/generated_samples/composer_v1beta1_generated_environments_check_upgrade_async.py b/samples/generated_samples/composer_v1beta1_generated_environments_check_upgrade_async.py index 7eaa946..6f832b6 100644 --- a/samples/generated_samples/composer_v1beta1_generated_environments_check_upgrade_async.py +++ b/samples/generated_samples/composer_v1beta1_generated_environments_check_upgrade_async.py @@ -47,7 +47,7 @@ async def sample_check_upgrade(): print("Waiting for operation to complete...") - response = await operation.result() + response = (await operation).result() # Handle the response print(response) diff --git a/samples/generated_samples/composer_v1beta1_generated_environments_create_environment_async.py b/samples/generated_samples/composer_v1beta1_generated_environments_create_environment_async.py index 3f6d69d..4e1a614 100644 --- a/samples/generated_samples/composer_v1beta1_generated_environments_create_environment_async.py +++ b/samples/generated_samples/composer_v1beta1_generated_environments_create_environment_async.py @@ -47,7 +47,7 @@ async def sample_create_environment(): print("Waiting for operation to complete...") - response = await operation.result() + response = (await operation).result() # Handle the response print(response) diff --git a/samples/generated_samples/composer_v1beta1_generated_environments_delete_environment_async.py b/samples/generated_samples/composer_v1beta1_generated_environments_delete_environment_async.py index cb1b567..e0b793c 100644 --- a/samples/generated_samples/composer_v1beta1_generated_environments_delete_environment_async.py +++ b/samples/generated_samples/composer_v1beta1_generated_environments_delete_environment_async.py @@ 
-47,7 +47,7 @@ async def sample_delete_environment(): print("Waiting for operation to complete...") - response = await operation.result() + response = (await operation).result() # Handle the response print(response) diff --git a/samples/generated_samples/composer_v1beta1_generated_environments_load_snapshot_async.py b/samples/generated_samples/composer_v1beta1_generated_environments_load_snapshot_async.py index fbd0dae..9a689ea 100644 --- a/samples/generated_samples/composer_v1beta1_generated_environments_load_snapshot_async.py +++ b/samples/generated_samples/composer_v1beta1_generated_environments_load_snapshot_async.py @@ -47,7 +47,7 @@ async def sample_load_snapshot(): print("Waiting for operation to complete...") - response = await operation.result() + response = (await operation).result() # Handle the response print(response) diff --git a/samples/generated_samples/composer_v1beta1_generated_environments_restart_web_server_async.py b/samples/generated_samples/composer_v1beta1_generated_environments_restart_web_server_async.py index 0b9da12..2c26cfb 100644 --- a/samples/generated_samples/composer_v1beta1_generated_environments_restart_web_server_async.py +++ b/samples/generated_samples/composer_v1beta1_generated_environments_restart_web_server_async.py @@ -47,7 +47,7 @@ async def sample_restart_web_server(): print("Waiting for operation to complete...") - response = await operation.result() + response = (await operation).result() # Handle the response print(response) diff --git a/samples/generated_samples/composer_v1beta1_generated_environments_save_snapshot_async.py b/samples/generated_samples/composer_v1beta1_generated_environments_save_snapshot_async.py index de0f02a..e007dcd 100644 --- a/samples/generated_samples/composer_v1beta1_generated_environments_save_snapshot_async.py +++ b/samples/generated_samples/composer_v1beta1_generated_environments_save_snapshot_async.py @@ -47,7 +47,7 @@ async def sample_save_snapshot(): print("Waiting for operation to complete...") - response = await operation.result() + response = (await operation).result() # Handle the response print(response) diff --git a/samples/generated_samples/composer_v1beta1_generated_environments_update_environment_async.py b/samples/generated_samples/composer_v1beta1_generated_environments_update_environment_async.py index f871fb5..a848381 100644 --- a/samples/generated_samples/composer_v1beta1_generated_environments_update_environment_async.py +++ b/samples/generated_samples/composer_v1beta1_generated_environments_update_environment_async.py @@ -47,7 +47,7 @@ async def sample_update_environment(): print("Waiting for operation to complete...") - response = await operation.result() + response = (await operation).result() # Handle the response print(response) diff --git a/setup.py b/setup.py index 577f135..4f4e527 100644 --- a/setup.py +++ b/setup.py @@ -40,7 +40,7 @@ release_status = "Development Status :: 5 - Production/Stable" dependencies = [ - "google-api-core[grpc] >= 1.33.2, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*", + "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", "proto-plus >= 1.22.0, <2.0.0dev", "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", ] diff --git a/testing/constraints-3.7.txt b/testing/constraints-3.7.txt index 6f3158c..6c44adf 100644 --- a/testing/constraints-3.7.txt +++ b/testing/constraints-3.7.txt @@ -4,6 +4,6 @@ # Pin the 
version to the lower bound. # e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", # Then this file should have google-cloud-foo==1.14.0 -google-api-core==1.33.2 +google-api-core==1.34.0 proto-plus==1.22.0 protobuf==3.19.5 From 7b4b43d32025f90c273ee820d7995361e6e0275c Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 8 Dec 2022 14:28:57 -0500 Subject: [PATCH 6/7] build(deps): bump certifi from 2022.9.24 to 2022.12.7 in /synthtool/gcp/templates/python_library/.kokoro (#152) Source-Link: https://github.com/googleapis/synthtool/commit/b4fe62efb5114b6738ad4b13d6f654f2bf4b7cc0 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:3bf87e47c2173d7eed42714589dc4da2c07c3268610f1e47f8e1a30decbfc7f1 Co-authored-by: Owl Bot --- .github/.OwlBot.lock.yaml | 2 +- .kokoro/requirements.txt | 6 +++--- .pre-commit-config.yaml | 2 +- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 3f1ccc0..fccaa8e 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,4 +13,4 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:e6cbd61f1838d9ff6a31436dfc13717f372a7482a82fc1863ca954ec47bff8c8 + digest: sha256:3bf87e47c2173d7eed42714589dc4da2c07c3268610f1e47f8e1a30decbfc7f1 diff --git a/.kokoro/requirements.txt b/.kokoro/requirements.txt index 9c1b9be..05dc467 100644 --- a/.kokoro/requirements.txt +++ b/.kokoro/requirements.txt @@ -20,9 +20,9 @@ cachetools==5.2.0 \ --hash=sha256:6a94c6402995a99c3970cc7e4884bb60b4a8639938157eeed436098bf9831757 \ --hash=sha256:f9f17d2aec496a9aa6b76f53e3b614c965223c061982d434d160f930c698a9db # via google-auth -certifi==2022.9.24 \ - --hash=sha256:0d9c601124e5a6ba9712dbc60d9c53c21e34f5f641fe83002317394311bdce14 \ - --hash=sha256:90c1a32f1d68f940488354e36370f6cca89f0f106db09518524c88d6ed83f382 +certifi==2022.12.7 \ + --hash=sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3 \ + --hash=sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18 # via requests cffi==1.15.1 \ --hash=sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5 \ diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 46d2371..5405cc8 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -25,7 +25,7 @@ repos: rev: 22.3.0 hooks: - id: black -- repo: https://gitlab.com/pycqa/flake8 +- repo: https://github.com/pycqa/flake8 rev: 3.9.2 hooks: - id: flake8 From 007bbb8f386b938fbf73851ec8ff093f5c14ffaa Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 13 Dec 2022 14:09:43 -0500 Subject: [PATCH 7/7] chore(main): release 1.5.0 (#149) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- .release-please-manifest.json | 2 +- CHANGELOG.md | 27 +++++++++++++++++++ .../airflow/service/gapic_version.py | 2 +- .../airflow/service_v1/gapic_version.py | 2 +- .../airflow/service_v1beta1/gapic_version.py | 2 +- ...loud.orchestration.airflow.service.v1.json | 2 +- ...orchestration.airflow.service.v1beta1.json | 2 +- 7 files changed, 33 insertions(+), 6 deletions(-) diff --git a/.release-please-manifest.json b/.release-please-manifest.json index 50f0c45..dd8fde7 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "1.4.4" + ".": "1.5.0" } diff 
--git a/CHANGELOG.md b/CHANGELOG.md index 2a481df..5bac962 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,32 @@ # Changelog +## [1.5.0](https://github.com/googleapis/python-orchestration-airflow/compare/v1.4.4...v1.5.0) (2022-12-13) + + +### Features + +* add support for `google.cloud.orchestration.airflow.service.__version__` ([8edf594](https://github.com/googleapis/python-orchestration-airflow/commit/8edf5948c6a59e5172c042faf5c40d98066b52a0)) +* Add typing to proto.Message based class attributes ([8edf594](https://github.com/googleapis/python-orchestration-airflow/commit/8edf5948c6a59e5172c042faf5c40d98066b52a0)) +* added field enable_ip_masq_agent to NodeConfig ([6c8a0bf](https://github.com/googleapis/python-orchestration-airflow/commit/6c8a0bf722793353ca9311410f245451bbdf437c)) +* added field scheduler_count to SoftwareConfig ([6c8a0bf](https://github.com/googleapis/python-orchestration-airflow/commit/6c8a0bf722793353ca9311410f245451bbdf437c)) +* added fields cloud_composer_network_ipv4_cidr_block, cloud_composer_network_ipv4_reserved_range, enable_privately_used_public_ips, cloud_composer_connection_subnetwork, networking_config to PrivateEnvironmentConfig ([6c8a0bf](https://github.com/googleapis/python-orchestration-airflow/commit/6c8a0bf722793353ca9311410f245451bbdf437c)) +* added fields maintenance_window, workloads_config, environment_size, master_authorized_networks_config, recovery_config to EnvironmentConfig ([6c8a0bf](https://github.com/googleapis/python-orchestration-airflow/commit/6c8a0bf722793353ca9311410f245451bbdf437c)) +* Added LoadSnapshot, SaveSnapshot RPCs ([#150](https://github.com/googleapis/python-orchestration-airflow/issues/150)) ([6c8a0bf](https://github.com/googleapis/python-orchestration-airflow/commit/6c8a0bf722793353ca9311410f245451bbdf437c)) + + +### Bug Fixes + +* Add dict typing for client_options ([8edf594](https://github.com/googleapis/python-orchestration-airflow/commit/8edf5948c6a59e5172c042faf5c40d98066b52a0)) +* **deps:** Require google-api-core >=1.34.0, >=2.11.0 ([9b1d61e](https://github.com/googleapis/python-orchestration-airflow/commit/9b1d61e4cb24023ca831e83799ccc61fc398d335)) +* Drop usage of pkg_resources ([9b1d61e](https://github.com/googleapis/python-orchestration-airflow/commit/9b1d61e4cb24023ca831e83799ccc61fc398d335)) +* Fix timeout default values ([9b1d61e](https://github.com/googleapis/python-orchestration-airflow/commit/9b1d61e4cb24023ca831e83799ccc61fc398d335)) + + +### Documentation + +* **samples:** Snippetgen handling of repeated enum field ([8edf594](https://github.com/googleapis/python-orchestration-airflow/commit/8edf5948c6a59e5172c042faf5c40d98066b52a0)) +* **samples:** Snippetgen should call await on the operation coroutine before calling result ([9b1d61e](https://github.com/googleapis/python-orchestration-airflow/commit/9b1d61e4cb24023ca831e83799ccc61fc398d335)) + ## [1.4.4](https://github.com/googleapis/python-orchestration-airflow/compare/v1.4.3...v1.4.4) (2022-10-07) diff --git a/google/cloud/orchestration/airflow/service/gapic_version.py b/google/cloud/orchestration/airflow/service/gapic_version.py index 25e4dd6..997edc1 100644 --- a/google/cloud/orchestration/airflow/service/gapic_version.py +++ b/google/cloud/orchestration/airflow/service/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.4.4" # {x-release-please-version} +__version__ = "1.5.0" # {x-release-please-version} diff --git a/google/cloud/orchestration/airflow/service_v1/gapic_version.py b/google/cloud/orchestration/airflow/service_v1/gapic_version.py index 25e4dd6..997edc1 100644 --- a/google/cloud/orchestration/airflow/service_v1/gapic_version.py +++ b/google/cloud/orchestration/airflow/service_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.4.4" # {x-release-please-version} +__version__ = "1.5.0" # {x-release-please-version} diff --git a/google/cloud/orchestration/airflow/service_v1beta1/gapic_version.py b/google/cloud/orchestration/airflow/service_v1beta1/gapic_version.py index 25e4dd6..997edc1 100644 --- a/google/cloud/orchestration/airflow/service_v1beta1/gapic_version.py +++ b/google/cloud/orchestration/airflow/service_v1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.4.4" # {x-release-please-version} +__version__ = "1.5.0" # {x-release-please-version} diff --git a/samples/generated_samples/snippet_metadata_google.cloud.orchestration.airflow.service.v1.json b/samples/generated_samples/snippet_metadata_google.cloud.orchestration.airflow.service.v1.json index afcb961..e0b8764 100644 --- a/samples/generated_samples/snippet_metadata_google.cloud.orchestration.airflow.service.v1.json +++ b/samples/generated_samples/snippet_metadata_google.cloud.orchestration.airflow.service.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-orchestration-airflow", - "version": "0.1.0" + "version": "1.5.0" }, "snippets": [ { diff --git a/samples/generated_samples/snippet_metadata_google.cloud.orchestration.airflow.service.v1beta1.json b/samples/generated_samples/snippet_metadata_google.cloud.orchestration.airflow.service.v1beta1.json index 5ba2ed3..eb3116b 100644 --- a/samples/generated_samples/snippet_metadata_google.cloud.orchestration.airflow.service.v1beta1.json +++ b/samples/generated_samples/snippet_metadata_google.cloud.orchestration.airflow.service.v1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-orchestration-airflow-service", - "version": "0.1.0" + "version": "1.5.0" }, "snippets": [ {
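Of the 1.5.0 changelog entries in this release commit, the user-visible additions are the package-level `__version__` attribute and the `SaveSnapshot`/`LoadSnapshot` RPCs registered in the transports earlier in the series. A hedged usage sketch follows: only the `environment` request field is set (the routing-header test near the top of this series asserts that field is forwarded as `x-goog-request-params`), and the resource name is a placeholder.

    from google.cloud.orchestration.airflow import service, service_v1

    # New in 1.5.0: the package reports its own version.
    print(service.__version__)

    def save_environment_snapshot(environment_name: str) -> None:
        # environment_name is a placeholder, e.g.
        # "projects/my-project/locations/us-central1/environments/my-environment".
        client = service_v1.EnvironmentsClient()
        operation = client.save_snapshot(
            request=service_v1.SaveSnapshotRequest(environment=environment_name)
        )
        print("Waiting for operation to complete...")
        print(operation.result())
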