diff --git a/google/cloud/orchestration/airflow/service/__init__.py b/google/cloud/orchestration/airflow/service/__init__.py index 1d4216c..660dedc 100644 --- a/google/cloud/orchestration/airflow/service/__init__.py +++ b/google/cloud/orchestration/airflow/service/__init__.py @@ -42,13 +42,23 @@ IPAllocationPolicy, ListEnvironmentsRequest, ListEnvironmentsResponse, + LoadSnapshotRequest, + LoadSnapshotResponse, + MaintenanceWindow, + MasterAuthorizedNetworksConfig, + NetworkingConfig, NodeConfig, PrivateClusterConfig, PrivateEnvironmentConfig, + RecoveryConfig, + SaveSnapshotRequest, + SaveSnapshotResponse, + ScheduledSnapshotsConfig, SoftwareConfig, UpdateEnvironmentRequest, WebServerConfig, WebServerNetworkAccessControl, + WorkloadsConfig, ) from google.cloud.orchestration.airflow.service_v1.types.image_versions import ( ImageVersion, @@ -75,13 +85,23 @@ "IPAllocationPolicy", "ListEnvironmentsRequest", "ListEnvironmentsResponse", + "LoadSnapshotRequest", + "LoadSnapshotResponse", + "MaintenanceWindow", + "MasterAuthorizedNetworksConfig", + "NetworkingConfig", "NodeConfig", "PrivateClusterConfig", "PrivateEnvironmentConfig", + "RecoveryConfig", + "SaveSnapshotRequest", + "SaveSnapshotResponse", + "ScheduledSnapshotsConfig", "SoftwareConfig", "UpdateEnvironmentRequest", "WebServerConfig", "WebServerNetworkAccessControl", + "WorkloadsConfig", "ImageVersion", "ListImageVersionsRequest", "ListImageVersionsResponse", diff --git a/google/cloud/orchestration/airflow/service_v1/__init__.py b/google/cloud/orchestration/airflow/service_v1/__init__.py index c8b3ab7..305b555 100644 --- a/google/cloud/orchestration/airflow/service_v1/__init__.py +++ b/google/cloud/orchestration/airflow/service_v1/__init__.py @@ -32,13 +32,23 @@ IPAllocationPolicy, ListEnvironmentsRequest, ListEnvironmentsResponse, + LoadSnapshotRequest, + LoadSnapshotResponse, + MaintenanceWindow, + MasterAuthorizedNetworksConfig, + NetworkingConfig, NodeConfig, PrivateClusterConfig, PrivateEnvironmentConfig, + RecoveryConfig, + SaveSnapshotRequest, + SaveSnapshotResponse, + ScheduledSnapshotsConfig, SoftwareConfig, UpdateEnvironmentRequest, WebServerConfig, WebServerNetworkAccessControl, + WorkloadsConfig, ) from .types.image_versions import ( ImageVersion, @@ -66,12 +76,22 @@ "ListEnvironmentsResponse", "ListImageVersionsRequest", "ListImageVersionsResponse", + "LoadSnapshotRequest", + "LoadSnapshotResponse", + "MaintenanceWindow", + "MasterAuthorizedNetworksConfig", + "NetworkingConfig", "NodeConfig", "OperationMetadata", "PrivateClusterConfig", "PrivateEnvironmentConfig", + "RecoveryConfig", + "SaveSnapshotRequest", + "SaveSnapshotResponse", + "ScheduledSnapshotsConfig", "SoftwareConfig", "UpdateEnvironmentRequest", "WebServerConfig", "WebServerNetworkAccessControl", + "WorkloadsConfig", ) diff --git a/google/cloud/orchestration/airflow/service_v1/gapic_metadata.json b/google/cloud/orchestration/airflow/service_v1/gapic_metadata.json index a98b49c..1c4eaec 100644 --- a/google/cloud/orchestration/airflow/service_v1/gapic_metadata.json +++ b/google/cloud/orchestration/airflow/service_v1/gapic_metadata.json @@ -30,6 +30,16 @@ "list_environments" ] }, + "LoadSnapshot": { + "methods": [ + "load_snapshot" + ] + }, + "SaveSnapshot": { + "methods": [ + "save_snapshot" + ] + }, "UpdateEnvironment": { "methods": [ "update_environment" @@ -60,6 +70,16 @@ "list_environments" ] }, + "LoadSnapshot": { + "methods": [ + "load_snapshot" + ] + }, + "SaveSnapshot": { + "methods": [ + "save_snapshot" + ] + }, "UpdateEnvironment": { "methods": [ 
"update_environment" diff --git a/google/cloud/orchestration/airflow/service_v1/services/environments/async_client.py b/google/cloud/orchestration/airflow/service_v1/services/environments/async_client.py index 22972a4..38b8a2e 100644 --- a/google/cloud/orchestration/airflow/service_v1/services/environments/async_client.py +++ b/google/cloud/orchestration/airflow/service_v1/services/environments/async_client.py @@ -731,23 +731,14 @@ async def sample_update_environment(): - Horizontally scale the number of nodes in the environment. An integer greater than or equal to 3 must be provided in the ``config.nodeCount`` - field. + field. Supported for Cloud Composer environments + in versions composer-1.\ *.*-airflow-*.*.*. - ``config.webServerNetworkAccessControl`` - Replace the environment's current ``WebServerNetworkAccessControl``. - - ``config.databaseConfig`` - - - Replace the environment's current - ``DatabaseConfig``. - - - ``config.webServerConfig`` - - - Replace the environment's current - ``WebServerConfig``. - - ``config.softwareConfig.airflowConfigOverrides`` - Replace all Apache Airflow config overrides. If a @@ -775,9 +766,34 @@ async def sample_update_environment(): - Replace all environment variables. If a replacement environment variable map is not included in ``environment``, all custom - environment variables are cleared. It is an error - to provide both this mask and a mask specifying - one or more individual environment variables. + environment variables are cleared. + + - ``config.softwareConfig.schedulerCount`` + + - Horizontally scale the number of schedulers in + Airflow. A positive integer not greater than the + number of nodes must be provided in the + ``config.softwareConfig.schedulerCount`` field. + Supported for Cloud Composer environments in + versions composer-1.\ *.*-airflow-2.*.*. + + - ``config.databaseConfig.machineType`` + + - Cloud SQL machine type used by Airflow database. + It has to be one of: db-n1-standard-2, + db-n1-standard-4, db-n1-standard-8 or + db-n1-standard-16. Supported for Cloud Composer + environments in versions + composer-1.\ *.*-airflow-*.*.*. + + - ``config.webServerConfig.machineType`` + + - Machine type on which Airflow web server is + running. It has to be one of: + composer-n1-webserver-2, composer-n1-webserver-4 + or composer-n1-webserver-8. Supported for Cloud + Composer environments in versions + composer-1.\ *.*-airflow-*.*.*. This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this @@ -974,6 +990,204 @@ async def sample_delete_environment(): # Done; return the response. return response + async def save_snapshot( + self, + request: Optional[Union[environments.SaveSnapshotRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a snapshots of a Cloud Composer environment. + As a result of this operation, snapshot of environment's + state is stored in a location specified in the + SaveSnapshotRequest. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.orchestration.airflow import service_v1 + + async def sample_save_snapshot(): + # Create a client + client = service_v1.EnvironmentsAsyncClient() + + # Initialize request argument(s) + request = service_v1.SaveSnapshotRequest( + ) + + # Make the request + operation = client.save_snapshot(request=request) + + print("Waiting for operation to complete...") + + response = await operation.result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.orchestration.airflow.service_v1.types.SaveSnapshotRequest, dict]]): + The request object. Request to create a snapshot of a + Cloud Composer environment. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.orchestration.airflow.service_v1.types.SaveSnapshotResponse` + Response to SaveSnapshotRequest. + + """ + # Create or coerce a protobuf request object. + request = environments.SaveSnapshotRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.save_snapshot, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("environment", request.environment),) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + environments.SaveSnapshotResponse, + metadata_type=operations.OperationMetadata, + ) + + # Done; return the response. + return response + + async def load_snapshot( + self, + request: Optional[Union[environments.LoadSnapshotRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Loads a snapshot of a Cloud Composer environment. + As a result of this operation, a snapshot of + environment's specified in LoadSnapshotRequest is loaded + into the environment. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.orchestration.airflow import service_v1 + + async def sample_load_snapshot(): + # Create a client + client = service_v1.EnvironmentsAsyncClient() + + # Initialize request argument(s) + request = service_v1.LoadSnapshotRequest( + ) + + # Make the request + operation = client.load_snapshot(request=request) + + print("Waiting for operation to complete...") + + response = await operation.result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.orchestration.airflow.service_v1.types.LoadSnapshotRequest, dict]]): + The request object. Request to load a snapshot into a + Cloud Composer environment. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.orchestration.airflow.service_v1.types.LoadSnapshotResponse` + Response to LoadSnapshotRequest. + + """ + # Create or coerce a protobuf request object. + request = environments.LoadSnapshotRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.load_snapshot, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("environment", request.environment),) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + environments.LoadSnapshotResponse, + metadata_type=operations.OperationMetadata, + ) + + # Done; return the response. + return response + async def __aenter__(self): return self diff --git a/google/cloud/orchestration/airflow/service_v1/services/environments/client.py b/google/cloud/orchestration/airflow/service_v1/services/environments/client.py index 8dd9f4b..59cba2d 100644 --- a/google/cloud/orchestration/airflow/service_v1/services/environments/client.py +++ b/google/cloud/orchestration/airflow/service_v1/services/environments/client.py @@ -962,23 +962,14 @@ def sample_update_environment(): - Horizontally scale the number of nodes in the environment. An integer greater than or equal to 3 must be provided in the ``config.nodeCount`` - field. + field. Supported for Cloud Composer environments + in versions composer-1.\ *.*-airflow-*.*.*. - ``config.webServerNetworkAccessControl`` - Replace the environment's current ``WebServerNetworkAccessControl``. - - ``config.databaseConfig`` - - - Replace the environment's current - ``DatabaseConfig``. - - - ``config.webServerConfig`` - - - Replace the environment's current - ``WebServerConfig``. - - ``config.softwareConfig.airflowConfigOverrides`` - Replace all Apache Airflow config overrides. 
If a @@ -1006,9 +997,34 @@ def sample_update_environment(): - Replace all environment variables. If a replacement environment variable map is not included in ``environment``, all custom - environment variables are cleared. It is an error - to provide both this mask and a mask specifying - one or more individual environment variables. + environment variables are cleared. + + - ``config.softwareConfig.schedulerCount`` + + - Horizontally scale the number of schedulers in + Airflow. A positive integer not greater than the + number of nodes must be provided in the + ``config.softwareConfig.schedulerCount`` field. + Supported for Cloud Composer environments in + versions composer-1.\ *.*-airflow-2.*.*. + + - ``config.databaseConfig.machineType`` + + - Cloud SQL machine type used by Airflow database. + It has to be one of: db-n1-standard-2, + db-n1-standard-4, db-n1-standard-8 or + db-n1-standard-16. Supported for Cloud Composer + environments in versions + composer-1.\ *.*-airflow-*.*.*. + + - ``config.webServerConfig.machineType`` + + - Machine type on which Airflow web server is + running. It has to be one of: + composer-n1-webserver-2, composer-n1-webserver-4 + or composer-n1-webserver-8. Supported for Cloud + Composer environments in versions + composer-1.\ *.*-airflow-*.*.*. This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this @@ -1205,6 +1221,206 @@ def sample_delete_environment(): # Done; return the response. return response + def save_snapshot( + self, + request: Optional[Union[environments.SaveSnapshotRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates a snapshots of a Cloud Composer environment. + As a result of this operation, snapshot of environment's + state is stored in a location specified in the + SaveSnapshotRequest. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.orchestration.airflow import service_v1 + + def sample_save_snapshot(): + # Create a client + client = service_v1.EnvironmentsClient() + + # Initialize request argument(s) + request = service_v1.SaveSnapshotRequest( + ) + + # Make the request + operation = client.save_snapshot(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.orchestration.airflow.service_v1.types.SaveSnapshotRequest, dict]): + The request object. Request to create a snapshot of a + Cloud Composer environment. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.orchestration.airflow.service_v1.types.SaveSnapshotResponse` + Response to SaveSnapshotRequest. 
+ + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a environments.SaveSnapshotRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, environments.SaveSnapshotRequest): + request = environments.SaveSnapshotRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.save_snapshot] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("environment", request.environment),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + environments.SaveSnapshotResponse, + metadata_type=operations.OperationMetadata, + ) + + # Done; return the response. + return response + + def load_snapshot( + self, + request: Optional[Union[environments.LoadSnapshotRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Loads a snapshot of a Cloud Composer environment. + As a result of this operation, a snapshot of + environment's specified in LoadSnapshotRequest is loaded + into the environment. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.orchestration.airflow import service_v1 + + def sample_load_snapshot(): + # Create a client + client = service_v1.EnvironmentsClient() + + # Initialize request argument(s) + request = service_v1.LoadSnapshotRequest( + ) + + # Make the request + operation = client.load_snapshot(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.orchestration.airflow.service_v1.types.LoadSnapshotRequest, dict]): + The request object. Request to load a snapshot into a + Cloud Composer environment. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.orchestration.airflow.service_v1.types.LoadSnapshotResponse` + Response to LoadSnapshotRequest. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a environments.LoadSnapshotRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, environments.LoadSnapshotRequest): + request = environments.LoadSnapshotRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.load_snapshot] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("environment", request.environment),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + environments.LoadSnapshotResponse, + metadata_type=operations.OperationMetadata, + ) + + # Done; return the response. + return response + def __enter__(self): return self diff --git a/google/cloud/orchestration/airflow/service_v1/services/environments/transports/base.py b/google/cloud/orchestration/airflow/service_v1/services/environments/transports/base.py index 8857232..98f65e1 100644 --- a/google/cloud/orchestration/airflow/service_v1/services/environments/transports/base.py +++ b/google/cloud/orchestration/airflow/service_v1/services/environments/transports/base.py @@ -153,6 +153,16 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.save_snapshot: gapic_v1.method.wrap_method( + self.save_snapshot, + default_timeout=None, + client_info=client_info, + ), + self.load_snapshot: gapic_v1.method.wrap_method( + self.load_snapshot, + default_timeout=None, + client_info=client_info, + ), } def close(self): @@ -217,6 +227,24 @@ def delete_environment( ]: raise NotImplementedError() + @property + def save_snapshot( + self, + ) -> Callable[ + [environments.SaveSnapshotRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def load_snapshot( + self, + ) -> Callable[ + [environments.LoadSnapshotRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + @property def kind(self) -> str: raise NotImplementedError() diff --git a/google/cloud/orchestration/airflow/service_v1/services/environments/transports/grpc.py b/google/cloud/orchestration/airflow/service_v1/services/environments/transports/grpc.py index 5c43736..bae6716 100644 --- a/google/cloud/orchestration/airflow/service_v1/services/environments/transports/grpc.py +++ b/google/cloud/orchestration/airflow/service_v1/services/environments/transports/grpc.py @@ -376,6 +376,64 @@ def delete_environment( ) return self._stubs["delete_environment"] + @property + def save_snapshot( + self, + ) -> Callable[[environments.SaveSnapshotRequest], operations_pb2.Operation]: + r"""Return a callable for the save snapshot method over gRPC. + + Creates a snapshots of a Cloud Composer environment. + As a result of this operation, snapshot of environment's + state is stored in a location specified in the + SaveSnapshotRequest. + + Returns: + Callable[[~.SaveSnapshotRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "save_snapshot" not in self._stubs: + self._stubs["save_snapshot"] = self.grpc_channel.unary_unary( + "/google.cloud.orchestration.airflow.service.v1.Environments/SaveSnapshot", + request_serializer=environments.SaveSnapshotRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["save_snapshot"] + + @property + def load_snapshot( + self, + ) -> Callable[[environments.LoadSnapshotRequest], operations_pb2.Operation]: + r"""Return a callable for the load snapshot method over gRPC. + + Loads a snapshot of a Cloud Composer environment. + As a result of this operation, a snapshot of + environment's specified in LoadSnapshotRequest is loaded + into the environment. + + Returns: + Callable[[~.LoadSnapshotRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "load_snapshot" not in self._stubs: + self._stubs["load_snapshot"] = self.grpc_channel.unary_unary( + "/google.cloud.orchestration.airflow.service.v1.Environments/LoadSnapshot", + request_serializer=environments.LoadSnapshotRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["load_snapshot"] + def close(self): self.grpc_channel.close() diff --git a/google/cloud/orchestration/airflow/service_v1/services/environments/transports/grpc_asyncio.py b/google/cloud/orchestration/airflow/service_v1/services/environments/transports/grpc_asyncio.py index d14cbb1..b27b83a 100644 --- a/google/cloud/orchestration/airflow/service_v1/services/environments/transports/grpc_asyncio.py +++ b/google/cloud/orchestration/airflow/service_v1/services/environments/transports/grpc_asyncio.py @@ -390,6 +390,68 @@ def delete_environment( ) return self._stubs["delete_environment"] + @property + def save_snapshot( + self, + ) -> Callable[ + [environments.SaveSnapshotRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the save snapshot method over gRPC. + + Creates a snapshots of a Cloud Composer environment. + As a result of this operation, snapshot of environment's + state is stored in a location specified in the + SaveSnapshotRequest. + + Returns: + Callable[[~.SaveSnapshotRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "save_snapshot" not in self._stubs: + self._stubs["save_snapshot"] = self.grpc_channel.unary_unary( + "/google.cloud.orchestration.airflow.service.v1.Environments/SaveSnapshot", + request_serializer=environments.SaveSnapshotRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["save_snapshot"] + + @property + def load_snapshot( + self, + ) -> Callable[ + [environments.LoadSnapshotRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the load snapshot method over gRPC. + + Loads a snapshot of a Cloud Composer environment. + As a result of this operation, a snapshot of + environment's specified in LoadSnapshotRequest is loaded + into the environment. 
+ + Returns: + Callable[[~.LoadSnapshotRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "load_snapshot" not in self._stubs: + self._stubs["load_snapshot"] = self.grpc_channel.unary_unary( + "/google.cloud.orchestration.airflow.service.v1.Environments/LoadSnapshot", + request_serializer=environments.LoadSnapshotRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["load_snapshot"] + def close(self): return self.grpc_channel.close() diff --git a/google/cloud/orchestration/airflow/service_v1/types/__init__.py b/google/cloud/orchestration/airflow/service_v1/types/__init__.py index d1be10d..7f77a37 100644 --- a/google/cloud/orchestration/airflow/service_v1/types/__init__.py +++ b/google/cloud/orchestration/airflow/service_v1/types/__init__.py @@ -25,13 +25,23 @@ IPAllocationPolicy, ListEnvironmentsRequest, ListEnvironmentsResponse, + LoadSnapshotRequest, + LoadSnapshotResponse, + MaintenanceWindow, + MasterAuthorizedNetworksConfig, + NetworkingConfig, NodeConfig, PrivateClusterConfig, PrivateEnvironmentConfig, + RecoveryConfig, + SaveSnapshotRequest, + SaveSnapshotResponse, + ScheduledSnapshotsConfig, SoftwareConfig, UpdateEnvironmentRequest, WebServerConfig, WebServerNetworkAccessControl, + WorkloadsConfig, ) from .image_versions import ( ImageVersion, @@ -52,13 +62,23 @@ "IPAllocationPolicy", "ListEnvironmentsRequest", "ListEnvironmentsResponse", + "LoadSnapshotRequest", + "LoadSnapshotResponse", + "MaintenanceWindow", + "MasterAuthorizedNetworksConfig", + "NetworkingConfig", "NodeConfig", "PrivateClusterConfig", "PrivateEnvironmentConfig", + "RecoveryConfig", + "SaveSnapshotRequest", + "SaveSnapshotResponse", + "ScheduledSnapshotsConfig", "SoftwareConfig", "UpdateEnvironmentRequest", "WebServerConfig", "WebServerNetworkAccessControl", + "WorkloadsConfig", "ImageVersion", "ListImageVersionsRequest", "ListImageVersionsResponse", diff --git a/google/cloud/orchestration/airflow/service_v1/types/environments.py b/google/cloud/orchestration/airflow/service_v1/types/environments.py index e29369d..c1cf78f 100644 --- a/google/cloud/orchestration/airflow/service_v1/types/environments.py +++ b/google/cloud/orchestration/airflow/service_v1/types/environments.py @@ -28,16 +28,26 @@ "ListEnvironmentsResponse", "DeleteEnvironmentRequest", "UpdateEnvironmentRequest", + "SaveSnapshotRequest", + "SaveSnapshotResponse", + "LoadSnapshotRequest", + "LoadSnapshotResponse", "EnvironmentConfig", "WebServerNetworkAccessControl", "DatabaseConfig", "WebServerConfig", "EncryptionConfig", + "MaintenanceWindow", "SoftwareConfig", "IPAllocationPolicy", "NodeConfig", "PrivateClusterConfig", + "NetworkingConfig", "PrivateEnvironmentConfig", + "WorkloadsConfig", + "RecoveryConfig", + "ScheduledSnapshotsConfig", + "MasterAuthorizedNetworksConfig", "Environment", "CheckUpgradeResponse", }, @@ -276,20 +286,14 @@ class UpdateEnvironmentRequest(proto.Message): - Horizontally scale the number of nodes in the environment. An integer greater than or equal to 3 must be provided in the ``config.nodeCount`` field. + Supported for Cloud Composer environments in versions + composer-1.\ *.*-airflow-*.*.*. - ``config.webServerNetworkAccessControl`` - Replace the environment's current ``WebServerNetworkAccessControl``. 
- - ``config.databaseConfig`` - - - Replace the environment's current ``DatabaseConfig``. - - - ``config.webServerConfig`` - - - Replace the environment's current ``WebServerConfig``. - - ``config.softwareConfig.airflowConfigOverrides`` - Replace all Apache Airflow config overrides. If a @@ -314,9 +318,32 @@ class UpdateEnvironmentRequest(proto.Message): - Replace all environment variables. If a replacement environment variable map is not included in ``environment``, all custom environment variables are - cleared. It is an error to provide both this mask and - a mask specifying one or more individual environment - variables. + cleared. + + - ``config.softwareConfig.schedulerCount`` + + - Horizontally scale the number of schedulers in + Airflow. A positive integer not greater than the + number of nodes must be provided in the + ``config.softwareConfig.schedulerCount`` field. + Supported for Cloud Composer environments in versions + composer-1.\ *.*-airflow-2.*.*. + + - ``config.databaseConfig.machineType`` + + - Cloud SQL machine type used by Airflow database. It + has to be one of: db-n1-standard-2, db-n1-standard-4, + db-n1-standard-8 or db-n1-standard-16. Supported for + Cloud Composer environments in versions + composer-1.\ *.*-airflow-*.*.*. + + - ``config.webServerConfig.machineType`` + + - Machine type on which Airflow web server is running. + It has to be one of: composer-n1-webserver-2, + composer-n1-webserver-4 or composer-n1-webserver-8. + Supported for Cloud Composer environments in versions + composer-1.\ *.*-airflow-*.*.*. """ name: str = proto.Field( @@ -335,6 +362,103 @@ class UpdateEnvironmentRequest(proto.Message): ) +class SaveSnapshotRequest(proto.Message): + r"""Request to create a snapshot of a Cloud Composer environment. + + Attributes: + environment (str): + The resource name of the source environment + in the form: + "projects/{projectId}/locations/{locationId}/environments/{environmentId}". + snapshot_location (str): + Location in a Cloud Storage where the + snapshot is going to be stored, e.g.: + "gs://my-bucket/snapshots". + """ + + environment: str = proto.Field( + proto.STRING, + number=1, + ) + snapshot_location: str = proto.Field( + proto.STRING, + number=2, + ) + + +class SaveSnapshotResponse(proto.Message): + r"""Response to SaveSnapshotRequest. + + Attributes: + snapshot_path (str): + The fully-resolved Cloud Storage path of the created + snapshot, e.g.: + "gs://my-bucket/snapshots/project_location_environment_timestamp". + This field is populated only if the snapshot creation was + successful. + """ + + snapshot_path: str = proto.Field( + proto.STRING, + number=1, + ) + + +class LoadSnapshotRequest(proto.Message): + r"""Request to load a snapshot into a Cloud Composer environment. + + Attributes: + environment (str): + The resource name of the target environment + in the form: + "projects/{projectId}/locations/{locationId}/environments/{environmentId}". + snapshot_path (str): + A Cloud Storage path to a snapshot to load, e.g.: + "gs://my-bucket/snapshots/project_location_environment_timestamp". + skip_pypi_packages_installation (bool): + Whether or not to skip installing Pypi + packages when loading the environment's state. + skip_environment_variables_setting (bool): + Whether or not to skip setting environment + variables when loading the environment's state. + skip_airflow_overrides_setting (bool): + Whether or not to skip setting Airflow + overrides when loading the environment's state. 
+ skip_gcs_data_copying (bool): + Whether or not to skip copying Cloud Storage + data when loading the environment's state. + """ + + environment: str = proto.Field( + proto.STRING, + number=1, + ) + snapshot_path: str = proto.Field( + proto.STRING, + number=2, + ) + skip_pypi_packages_installation: bool = proto.Field( + proto.BOOL, + number=3, + ) + skip_environment_variables_setting: bool = proto.Field( + proto.BOOL, + number=4, + ) + skip_airflow_overrides_setting: bool = proto.Field( + proto.BOOL, + number=5, + ) + skip_gcs_data_copying: bool = proto.Field( + proto.BOOL, + number=6, + ) + + +class LoadSnapshotResponse(proto.Message): + r"""Response to LoadSnapshotRequest.""" + + class EnvironmentConfig(proto.Message): r"""Configuration information for an environment. @@ -351,9 +475,11 @@ class EnvironmentConfig(proto.Message): for this environment reside in a simulated directory with the given prefix. node_count (int): - The number of nodes in the Kubernetes Engine - cluster that will be used to run this - environment. + The number of nodes in the Kubernetes Engine cluster that + will be used to run this environment. + + This field is supported for Cloud Composer environments in + versions composer-1.\ *.*-airflow-*.*.*. software_config (google.cloud.orchestration.airflow.service_v1.types.SoftwareConfig): The configuration settings for software inside the environment. @@ -379,12 +505,65 @@ class EnvironmentConfig(proto.Message): Optional. The encryption options for the Cloud Composer environment and its dependencies. Cannot be updated. + maintenance_window (google.cloud.orchestration.airflow.service_v1.types.MaintenanceWindow): + Optional. The maintenance window is the + period when Cloud Composer components may + undergo maintenance. It is defined so that + maintenance is not executed during peak hours or + critical time periods. + + The system will not be under maintenance for + every occurrence of this window, but when + maintenance is planned, it will be scheduled + during the window. + + The maintenance window period must encompass at + least 12 hours per week. This may be split into + multiple chunks, each with a size of at least 4 + hours. + + If this value is omitted, the default value for + maintenance window will be applied. The default + value is Saturday and Sunday 00-06 GMT. + workloads_config (google.cloud.orchestration.airflow.service_v1.types.WorkloadsConfig): + Optional. The workloads configuration settings for the GKE + cluster associated with the Cloud Composer environment. The + GKE cluster runs Airflow scheduler, web server and workers + workloads. + + This field is supported for Cloud Composer environments in + versions composer-2.\ *.*-airflow-*.*.\* and newer. + environment_size (google.cloud.orchestration.airflow.service_v1.types.EnvironmentConfig.EnvironmentSize): + Optional. The size of the Cloud Composer environment. + + This field is supported for Cloud Composer environments in + versions composer-2.\ *.*-airflow-*.*.\* and newer. airflow_uri (str): Output only. The URI of the Apache Airflow Web UI hosted within this environment (see `Airflow web interface `__). + master_authorized_networks_config (google.cloud.orchestration.airflow.service_v1.types.MasterAuthorizedNetworksConfig): + Optional. The configuration options for GKE + cluster master authorized networks. By default + master authorized networks feature is: - in case + of private environment: enabled with no external + networks allowlisted. + - in case of public environment: disabled. 
+ recovery_config (google.cloud.orchestration.airflow.service_v1.types.RecoveryConfig): + Optional. The Recovery settings configuration of an + environment. + + This field is supported for Cloud Composer environments in + versions composer-2.\ *.*-airflow-*.*.\* and newer. """ + class EnvironmentSize(proto.Enum): + r"""The size of the Cloud Composer environment.""" + ENVIRONMENT_SIZE_UNSPECIFIED = 0 + ENVIRONMENT_SIZE_SMALL = 1 + ENVIRONMENT_SIZE_MEDIUM = 2 + ENVIRONMENT_SIZE_LARGE = 3 + gke_cluster: str = proto.Field( proto.STRING, number=1, @@ -432,10 +611,35 @@ class EnvironmentConfig(proto.Message): number=11, message="EncryptionConfig", ) + maintenance_window: "MaintenanceWindow" = proto.Field( + proto.MESSAGE, + number=12, + message="MaintenanceWindow", + ) + workloads_config: "WorkloadsConfig" = proto.Field( + proto.MESSAGE, + number=15, + message="WorkloadsConfig", + ) + environment_size: EnvironmentSize = proto.Field( + proto.ENUM, + number=16, + enum=EnvironmentSize, + ) airflow_uri: str = proto.Field( proto.STRING, number=6, ) + master_authorized_networks_config: "MasterAuthorizedNetworksConfig" = proto.Field( + proto.MESSAGE, + number=17, + message="MasterAuthorizedNetworksConfig", + ) + recovery_config: "RecoveryConfig" = proto.Field( + proto.MESSAGE, + number=18, + message="RecoveryConfig", + ) class WebServerNetworkAccessControl(proto.Message): @@ -489,11 +693,11 @@ class DatabaseConfig(proto.Message): Attributes: machine_type (str): - Optional. Cloud SQL machine type used by - Airflow database. It has to be one of: - db-n1-standard-2, db-n1-standard-4, - db-n1-standard-8 or db-n1-standard-16. If not - specified, db-n1-standard-2 will be used. + Optional. Cloud SQL machine type used by Airflow database. + It has to be one of: db-n1-standard-2, db-n1-standard-4, + db-n1-standard-8 or db-n1-standard-16. If not specified, + db-n1-standard-2 will be used. Supported for Cloud Composer + environments in versions composer-1.\ *.*-airflow-*.*.*. """ machine_type: str = proto.Field( @@ -503,8 +707,9 @@ class DatabaseConfig(proto.Message): class WebServerConfig(proto.Message): - r"""The configuration settings for the Airflow web server App - Engine instance. + r"""The configuration settings for the Airflow web server App Engine + instance. Supported for Cloud Composer environments in versions + composer-1.\ *.*-airflow-*.*.\* Attributes: machine_type (str): @@ -525,8 +730,9 @@ class WebServerConfig(proto.Message): class EncryptionConfig(proto.Message): - r"""The encryption options for the Cloud Composer environment - and its dependencies. + r"""The encryption options for the Cloud Composer environment and its + dependencies.Supported for Cloud Composer environments in versions + composer-1.\ *.*-airflow-*.*.*. Attributes: kms_key_name (str): @@ -542,6 +748,54 @@ class EncryptionConfig(proto.Message): ) +class MaintenanceWindow(proto.Message): + r"""The configuration settings for Cloud Composer maintenance window. + The following example: + + :: + + { + "startTime":"2019-08-01T01:00:00Z" + "endTime":"2019-08-01T07:00:00Z" + "recurrence":"FREQ=WEEKLY;BYDAY=TU,WE" + } + + would define a maintenance window between 01 and 07 hours UTC during + each Tuesday and Wednesday. + + Attributes: + start_time (google.protobuf.timestamp_pb2.Timestamp): + Required. Start time of the first recurrence + of the maintenance window. + end_time (google.protobuf.timestamp_pb2.Timestamp): + Required. Maintenance window end time. It is used only to + calculate the duration of the maintenance window. 
The value + for end-time must be in the future, relative to + ``start_time``. + recurrence (str): + Required. Maintenance window recurrence. Format is a subset + of `RFC-5545 `__ + ``RRULE``. The only allowed values for ``FREQ`` field are + ``FREQ=DAILY`` and ``FREQ=WEEKLY;BYDAY=...`` Example values: + ``FREQ=WEEKLY;BYDAY=TU,WE``, ``FREQ=DAILY``. + """ + + start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + recurrence: str = proto.Field( + proto.STRING, + number=3, + ) + + class SoftwareConfig(proto.Message): r"""Specifies the selection and configuration of software inside the environment. @@ -552,25 +806,32 @@ class SoftwareConfig(proto.Message): encapsulates both the version of Cloud Composer functionality and the version of Apache Airflow. It must match the regular expression - ``composer-([0-9]+\.[0-9]+\.[0-9]+|latest)-airflow-[0-9]+\.[0-9]+(\.[0-9]+.*)?``. + ``composer-([0-9]+(\.[0-9]+\.[0-9]+(-preview\.[0-9]+)?)?|latest)-airflow-([0-9]+(\.[0-9]+(\.[0-9]+)?)?)``. When used as input, the server also checks if the provided version is supported and denies the request for an unsupported version. - The Cloud Composer portion of the version is a `semantic - version `__ or ``latest``. When the - patch version is omitted, the current Cloud Composer patch - version is selected. When ``latest`` is provided instead of - an explicit version number, the server replaces ``latest`` - with the current Cloud Composer version and stores that - version number in the same field. - - The portion of the image version that follows *airflow-* is - an official Apache Airflow repository `release - name `__. - - See also `Version - List `__. + The Cloud Composer portion of the image version is a full + `semantic version `__, or an alias in + the form of major version number or ``latest``. When an + alias is provided, the server replaces it with the current + Cloud Composer version that satisfies the alias. + + The Apache Airflow portion of the image version is a full + semantic version that points to one of the supported Apache + Airflow versions, or an alias in the form of only major or + major.minor versions specified. When an alias is provided, + the server replaces it with the latest Apache Airflow + version that satisfies the alias and is supported in the + given Cloud Composer version. + + In all cases, the resolved image version is stored in the + same field. + + See also `version + list `__ + and `versioning + overview `__. airflow_config_overrides (MutableMapping[str, str]): Optional. Apache Airflow configuration properties to override. @@ -624,11 +885,20 @@ class SoftwareConfig(proto.Message): - ``SQL_REGION`` - ``SQL_USER`` python_version (str): - Optional. The major version of Python used to - run the Apache Airflow scheduler, worker, and - webserver processes. - Can be set to '2' or '3'. If not specified, the - default is '3'. Cannot be updated. + Optional. The major version of Python used to run the Apache + Airflow scheduler, worker, and webserver processes. + + Can be set to '2' or '3'. If not specified, the default is + '3'. Cannot be updated. + + This field is only supported for Cloud Composer environments + in versions composer-1.\ *.*-airflow-*.*.*. Environments in + newer versions always use Python major version 3. + scheduler_count (int): + Optional. The number of schedulers for Airflow. 
+ + This field is supported for Cloud Composer environments in + versions composer-1.\ *.*-airflow-2.*.*. """ image_version: str = proto.Field( @@ -654,6 +924,10 @@ class SoftwareConfig(proto.Message): proto.STRING, number=6, ) + scheduler_count: int = proto.Field( + proto.INT32, + number=7, + ) class IPAllocationPolicy(proto.Message): @@ -671,20 +945,26 @@ class IPAllocationPolicy(proto.Message): use_ip_aliases (bool): Optional. Whether or not to enable Alias IPs in the GKE cluster. If ``true``, a VPC-native cluster is created. + + This field is only supported for Cloud Composer environments + in versions composer-1.\ *.*-airflow-*.*.*. Environments in + newer versions always use VPC-native GKE clusters. cluster_secondary_range_name (str): Optional. The name of the GKE cluster's secondary range used to allocate IP addresses to pods. - This field is applicable only when ``use_ip_aliases`` is - true. + For Cloud Composer environments in versions + composer-1.\ *.*-airflow-*.*.*, this field is applicable + only when ``use_ip_aliases`` is true. This field is a member of `oneof`_ ``cluster_ip_allocation``. cluster_ipv4_cidr_block (str): Optional. The IP address range used to allocate IP addresses to pods in the GKE cluster. - This field is applicable only when ``use_ip_aliases`` is - true. + For Cloud Composer environments in versions + composer-1.\ *.*-airflow-*.*.*, this field is applicable + only when ``use_ip_aliases`` is true. Set to blank to have GKE choose a range with the default size. @@ -693,7 +973,7 @@ class IPAllocationPolicy(proto.Message): with a specific netmask. Set to a - `CIDR `__ + `CIDR `__ notation (e.g. ``10.96.0.0/14``) from the RFC-1918 private networks (e.g. ``10.0.0.0/8``, ``172.16.0.0/12``, ``192.168.0.0/16``) to pick a specific range to use. @@ -703,16 +983,18 @@ class IPAllocationPolicy(proto.Message): Optional. The name of the services' secondary range used to allocate IP addresses to the GKE cluster. - This field is applicable only when ``use_ip_aliases`` is - true. + For Cloud Composer environments in versions + composer-1.\ *.*-airflow-*.*.*, this field is applicable + only when ``use_ip_aliases`` is true. This field is a member of `oneof`_ ``services_ip_allocation``. services_ipv4_cidr_block (str): Optional. The IP address range of the services IP addresses in this GKE cluster. - This field is applicable only when ``use_ip_aliases`` is - true. + For Cloud Composer environments in versions + composer-1.\ *.*-airflow-*.*.*, this field is applicable + only when ``use_ip_aliases`` is true. Set to blank to have GKE choose a range with the default size. @@ -721,7 +1003,7 @@ class IPAllocationPolicy(proto.Message): with a specific netmask. Set to a - `CIDR `__ + `CIDR `__ notation (e.g. ``10.96.0.0/14``) from the RFC-1918 private networks (e.g. ``10.0.0.0/8``, ``172.16.0.0/12``, ``192.168.0.0/16``) to pick a specific range to use. @@ -778,6 +1060,9 @@ class NodeConfig(proto.Message): one field (``location`` or ``nodeConfig.machineType``) is specified, the location information from the specified field will be propagated to the unspecified field. + + This field is supported for Cloud Composer environments in + versions composer-1.\ *.*-airflow-*.*.*. machine_type (str): Optional. The Compute Engine `machine type `__ used for cluster @@ -802,6 +1087,9 @@ class NodeConfig(proto.Message): If this field is unspecified, the ``machineTypeId`` defaults to "n1-standard-1". + + This field is supported for Cloud Composer environments in + versions composer-1.\ *.*-airflow-*.*.*. 
network (str): Optional. The Compute Engine network to be used for machine communications, specified as a `relative resource @@ -826,14 +1114,20 @@ class NodeConfig(proto.Message): also be provided, and the subnetwork must belong to the enclosing environment's project and location. disk_size_gb (int): - Optional. The disk size in GB used for node - VMs. Minimum size is 20GB. If unspecified, - defaults to 100GB. Cannot be updated. + Optional. The disk size in GB used for node VMs. Minimum + size is 30GB. If unspecified, defaults to 100GB. Cannot be + updated. + + This field is supported for Cloud Composer environments in + versions composer-1.\ *.*-airflow-*.*.*. oauth_scopes (MutableSequence[str]): Optional. The set of Google API scopes to be made available on all node VMs. If ``oauth_scopes`` is empty, defaults to ["https://www.googleapis.com/auth/cloud-platform"]. Cannot be updated. + + This field is supported for Cloud Composer environments in + versions composer-1.\ *.*-airflow-*.*.*. service_account (str): Optional. The Google Cloud Platform Service Account to be used by the node VMs. If a service @@ -846,9 +1140,20 @@ class NodeConfig(proto.Message): network firewalls. Each tag within the list must comply with `RFC1035 `__. Cannot be updated. + + This field is supported for Cloud Composer environments in + versions composer-1.\ *.*-airflow-*.*.*. ip_allocation_policy (google.cloud.orchestration.airflow.service_v1.types.IPAllocationPolicy): Optional. The configuration for controlling how IPs are allocated in the GKE cluster. + enable_ip_masq_agent (bool): + Optional. Deploys 'ip-masq-agent' daemon set + in the GKE cluster and defines + nonMasqueradeCIDRs equals to pod IP range so IP + masquerading is used for all destination + addresses, except between pods traffic. + See: + https://cloud.google.com/kubernetes-engine/docs/how-to/ip-masquerade-agent """ location: str = proto.Field( @@ -888,6 +1193,10 @@ class NodeConfig(proto.Message): number=9, message="IPAllocationPolicy", ) + enable_ip_masq_agent: bool = proto.Field( + proto.BOOL, + number=11, + ) class PrivateClusterConfig(proto.Message): @@ -927,6 +1236,34 @@ class PrivateClusterConfig(proto.Message): ) +class NetworkingConfig(proto.Message): + r"""Configuration options for networking connections in the + Composer 2 environment. + + Attributes: + connection_type (google.cloud.orchestration.airflow.service_v1.types.NetworkingConfig.ConnectionType): + Optional. Indicates the user requested + specifc connection type between Tenant and + Customer projects. You cannot set networking + connection type in public IP environment. + """ + + class ConnectionType(proto.Enum): + r"""Represents connection type between Composer environment in + Customer Project and the corresponding Tenant project, from a + predefined list of available connection modes. + """ + CONNECTION_TYPE_UNSPECIFIED = 0 + VPC_PEERING = 1 + PRIVATE_SERVICE_CONNECT = 2 + + connection_type: ConnectionType = proto.Field( + proto.ENUM, + number=1, + enum=ConnectionType, + ) + + class PrivateEnvironmentConfig(proto.Message): r"""The configuration information for configuring a Private IP Cloud Composer environment. @@ -935,7 +1272,9 @@ class PrivateEnvironmentConfig(proto.Message): enable_private_environment (bool): Optional. If ``true``, a Private IP Cloud Composer environment is created. If this field is set to true, - ``IPAllocationPolicy.use_ip_aliases`` must be set to true. 
+ ``IPAllocationPolicy.use_ip_aliases`` must be set to true + for Cloud Composer environments in versions + composer-1.\ *.*-airflow-*.*.*. private_cluster_config (google.cloud.orchestration.airflow.service_v1.types.PrivateClusterConfig): Optional. Configuration for the private GKE cluster for a Private IP Cloud Composer @@ -945,13 +1284,49 @@ class PrivateEnvironmentConfig(proto.Message): will be reserved. Needs to be disjoint from ``private_cluster_config.master_ipv4_cidr_block`` and ``cloud_sql_ipv4_cidr_block``. + + This field is supported for Cloud Composer environments in + versions composer-1.\ *.*-airflow-*.*.*. cloud_sql_ipv4_cidr_block (str): Optional. The CIDR block from which IP range in tenant project will be reserved for Cloud SQL. Needs to be disjoint from ``web_server_ipv4_cidr_block``. web_server_ipv4_reserved_range (str): - Output only. The IP range reserved for the - tenant project's App Engine VMs. + Output only. The IP range reserved for the tenant project's + App Engine VMs. + + This field is supported for Cloud Composer environments in + versions composer-1.\ *.*-airflow-*.*.*. + cloud_composer_network_ipv4_cidr_block (str): + Optional. The CIDR block from which IP range for Cloud + Composer Network in tenant project will be reserved. Needs + to be disjoint from + private_cluster_config.master_ipv4_cidr_block and + cloud_sql_ipv4_cidr_block. + + This field is supported for Cloud Composer environments in + versions composer-2.\ *.*-airflow-*.*.\* and newer. + cloud_composer_network_ipv4_reserved_range (str): + Output only. The IP range reserved for the tenant project's + Cloud Composer network. + + This field is supported for Cloud Composer environments in + versions composer-2.\ *.*-airflow-*.*.\* and newer. + enable_privately_used_public_ips (bool): + Optional. When enabled, IPs from public (non-RFC1918) ranges + can be used for + ``IPAllocationPolicy.cluster_ipv4_cidr_block`` and + ``IPAllocationPolicy.service_ipv4_cidr_block``. + cloud_composer_connection_subnetwork (str): + Optional. When specified, the environment + will use Private Service Connect instead of VPC + peerings to connect to Cloud SQL in the Tenant + Project, and the PSC endpoint in the Customer + Project will use an IP address from this + subnetwork. + networking_config (google.cloud.orchestration.airflow.service_v1.types.NetworkingConfig): + Optional. Configuration for the network + connections configuration in the environment. """ enable_private_environment: bool = proto.Field( @@ -975,6 +1350,266 @@ class PrivateEnvironmentConfig(proto.Message): proto.STRING, number=5, ) + cloud_composer_network_ipv4_cidr_block: str = proto.Field( + proto.STRING, + number=7, + ) + cloud_composer_network_ipv4_reserved_range: str = proto.Field( + proto.STRING, + number=8, + ) + enable_privately_used_public_ips: bool = proto.Field( + proto.BOOL, + number=6, + ) + cloud_composer_connection_subnetwork: str = proto.Field( + proto.STRING, + number=9, + ) + networking_config: "NetworkingConfig" = proto.Field( + proto.MESSAGE, + number=10, + message="NetworkingConfig", + ) + + +class WorkloadsConfig(proto.Message): + r"""The Kubernetes workloads configuration for GKE cluster associated + with the Cloud Composer environment. Supported for Cloud Composer + environments in versions composer-2.\ *.*-airflow-*.*.\* and newer. + + Attributes: + scheduler (google.cloud.orchestration.airflow.service_v1.types.WorkloadsConfig.SchedulerResource): + Optional. Resources used by Airflow + schedulers. 
+ web_server (google.cloud.orchestration.airflow.service_v1.types.WorkloadsConfig.WebServerResource): + Optional. Resources used by Airflow web + server. + worker (google.cloud.orchestration.airflow.service_v1.types.WorkloadsConfig.WorkerResource): + Optional. Resources used by Airflow workers. + """ + + class SchedulerResource(proto.Message): + r"""Configuration for resources used by Airflow schedulers. + + Attributes: + cpu (float): + Optional. CPU request and limit for a single + Airflow scheduler replica. + memory_gb (float): + Optional. Memory (GB) request and limit for a + single Airflow scheduler replica. + storage_gb (float): + Optional. Storage (GB) request and limit for + a single Airflow scheduler replica. + count (int): + Optional. The number of schedulers. + """ + + cpu: float = proto.Field( + proto.FLOAT, + number=1, + ) + memory_gb: float = proto.Field( + proto.FLOAT, + number=2, + ) + storage_gb: float = proto.Field( + proto.FLOAT, + number=3, + ) + count: int = proto.Field( + proto.INT32, + number=4, + ) + + class WebServerResource(proto.Message): + r"""Configuration for resources used by Airflow web server. + + Attributes: + cpu (float): + Optional. CPU request and limit for Airflow + web server. + memory_gb (float): + Optional. Memory (GB) request and limit for + Airflow web server. + storage_gb (float): + Optional. Storage (GB) request and limit for + Airflow web server. + """ + + cpu: float = proto.Field( + proto.FLOAT, + number=1, + ) + memory_gb: float = proto.Field( + proto.FLOAT, + number=2, + ) + storage_gb: float = proto.Field( + proto.FLOAT, + number=3, + ) + + class WorkerResource(proto.Message): + r"""Configuration for resources used by Airflow workers. + + Attributes: + cpu (float): + Optional. CPU request and limit for a single + Airflow worker replica. + memory_gb (float): + Optional. Memory (GB) request and limit for a + single Airflow worker replica. + storage_gb (float): + Optional. Storage (GB) request and limit for + a single Airflow worker replica. + min_count (int): + Optional. Minimum number of workers for + autoscaling. + max_count (int): + Optional. Maximum number of workers for + autoscaling. + """ + + cpu: float = proto.Field( + proto.FLOAT, + number=1, + ) + memory_gb: float = proto.Field( + proto.FLOAT, + number=2, + ) + storage_gb: float = proto.Field( + proto.FLOAT, + number=3, + ) + min_count: int = proto.Field( + proto.INT32, + number=4, + ) + max_count: int = proto.Field( + proto.INT32, + number=5, + ) + + scheduler: SchedulerResource = proto.Field( + proto.MESSAGE, + number=1, + message=SchedulerResource, + ) + web_server: WebServerResource = proto.Field( + proto.MESSAGE, + number=2, + message=WebServerResource, + ) + worker: WorkerResource = proto.Field( + proto.MESSAGE, + number=3, + message=WorkerResource, + ) + + +class RecoveryConfig(proto.Message): + r"""The Recovery settings of an environment. + + Attributes: + scheduled_snapshots_config (google.cloud.orchestration.airflow.service_v1.types.ScheduledSnapshotsConfig): + Optional. The configuration for scheduled + snapshot creation mechanism. + """ + + scheduled_snapshots_config: "ScheduledSnapshotsConfig" = proto.Field( + proto.MESSAGE, + number=1, + message="ScheduledSnapshotsConfig", + ) + + +class ScheduledSnapshotsConfig(proto.Message): + r"""The configuration for scheduled snapshot creation mechanism. + + Attributes: + enabled (bool): + Optional. Whether scheduled snapshots + creation is enabled. + snapshot_location (str): + Optional. 
The Cloud Storage location for + storing automatically created snapshots. + snapshot_creation_schedule (str): + Optional. The cron expression representing + the time when snapshots creation mechanism runs. + This field is subject to additional validation + around frequency of execution. + time_zone (str): + Optional. Time zone that sets the context to interpret + snapshot_creation_schedule. + """ + + enabled: bool = proto.Field( + proto.BOOL, + number=1, + ) + snapshot_location: str = proto.Field( + proto.STRING, + number=6, + ) + snapshot_creation_schedule: str = proto.Field( + proto.STRING, + number=3, + ) + time_zone: str = proto.Field( + proto.STRING, + number=5, + ) + + +class MasterAuthorizedNetworksConfig(proto.Message): + r"""Configuration options for the master authorized networks + feature. Enabled master authorized networks will disallow all + external traffic to access Kubernetes master through HTTPS + except traffic from the given CIDR blocks, Google Compute Engine + Public IPs and Google Prod IPs. + + Attributes: + enabled (bool): + Whether or not master authorized networks + feature is enabled. + cidr_blocks (MutableSequence[google.cloud.orchestration.airflow.service_v1.types.MasterAuthorizedNetworksConfig.CidrBlock]): + Up to 50 external networks that could access + Kubernetes master through HTTPS. + """ + + class CidrBlock(proto.Message): + r"""CIDR block with an optional name. + + Attributes: + display_name (str): + User-defined name that identifies the CIDR + block. + cidr_block (str): + CIDR block that must be specified in CIDR + notation. + """ + + display_name: str = proto.Field( + proto.STRING, + number=1, + ) + cidr_block: str = proto.Field( + proto.STRING, + number=2, + ) + + enabled: bool = proto.Field( + proto.BOOL, + number=1, + ) + cidr_blocks: MutableSequence[CidrBlock] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=CidrBlock, + ) class Environment(proto.Message): diff --git a/google/cloud/orchestration/airflow/service_v1/types/image_versions.py b/google/cloud/orchestration/airflow/service_v1/types/image_versions.py index d338b01..323efa1 100644 --- a/google/cloud/orchestration/airflow/service_v1/types/image_versions.py +++ b/google/cloud/orchestration/airflow/service_v1/types/image_versions.py @@ -97,7 +97,7 @@ class ImageVersion(proto.Message): Attributes: image_version_id (str): The string identifier of the ImageVersion, in - the form: "composer-x.y.z-airflow-a.b(.c)". + the form: "composer-x.y.z-airflow-a.b.c". 
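The new configuration messages above are ordinary proto-plus types, so they can be constructed directly and attached wherever the corresponding environment fields are available. A minimal, hedged sketch for the v1 surface; every literal value (sizes, bucket, CIDR) is illustrative only, while the type and field names come from the definitions above:

.. code-block:: python

    from google.cloud.orchestration.airflow import service_v1

    # Per-workload resource requests/limits for the GKE cluster (Composer 2).
    workloads = service_v1.WorkloadsConfig(
        scheduler=service_v1.WorkloadsConfig.SchedulerResource(
            cpu=0.5, memory_gb=1.875, storage_gb=1, count=1
        ),
        web_server=service_v1.WorkloadsConfig.WebServerResource(
            cpu=0.5, memory_gb=1.875, storage_gb=1
        ),
        worker=service_v1.WorkloadsConfig.WorkerResource(
            cpu=0.5, memory_gb=1.875, storage_gb=1, min_count=1, max_count=3
        ),
    )

    # Scheduled snapshots wrapped in the environment's recovery settings.
    recovery = service_v1.RecoveryConfig(
        scheduled_snapshots_config=service_v1.ScheduledSnapshotsConfig(
            enabled=True,
            snapshot_location="gs://my-bucket/snapshots",  # placeholder bucket
            snapshot_creation_schedule="0 4 * * *",
            time_zone="UTC",
        )
    )

    # Restrict HTTPS access to the GKE master to a named CIDR block.
    authorized_networks = service_v1.MasterAuthorizedNetworksConfig(
        enabled=True,
        cidr_blocks=[
            service_v1.MasterAuthorizedNetworksConfig.CidrBlock(
                display_name="office", cidr_block="203.0.113.0/24"
            )
        ],
    )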
is_default (bool): Whether this is the default ImageVersion used by Composer during environment creation if no diff --git a/google/cloud/orchestration/airflow/service_v1/types/operations.py b/google/cloud/orchestration/airflow/service_v1/types/operations.py index dbafb35..36db2ce 100644 --- a/google/cloud/orchestration/airflow/service_v1/types/operations.py +++ b/google/cloud/orchestration/airflow/service_v1/types/operations.py @@ -69,6 +69,8 @@ class Type(proto.Enum): DELETE = 2 UPDATE = 3 CHECK = 4 + SAVE_SNAPSHOT = 5 + LOAD_SNAPSHOT = 6 state: State = proto.Field( proto.ENUM, diff --git a/google/cloud/orchestration/airflow/service_v1beta1/__init__.py b/google/cloud/orchestration/airflow/service_v1beta1/__init__.py index c4fdc7f..cef0c25 100644 --- a/google/cloud/orchestration/airflow/service_v1beta1/__init__.py +++ b/google/cloud/orchestration/airflow/service_v1beta1/__init__.py @@ -23,21 +23,32 @@ from .types.environments import ( CheckUpgradeRequest, CheckUpgradeResponse, + CloudDataLineageIntegration, CreateEnvironmentRequest, DatabaseConfig, DeleteEnvironmentRequest, EncryptionConfig, Environment, EnvironmentConfig, + ExecuteAirflowCommandResponse, GetEnvironmentRequest, IPAllocationPolicy, ListEnvironmentsRequest, ListEnvironmentsResponse, + LoadSnapshotRequest, + LoadSnapshotResponse, MaintenanceWindow, + MasterAuthorizedNetworksConfig, + NetworkingConfig, NodeConfig, + PollAirflowCommandResponse, PrivateClusterConfig, PrivateEnvironmentConfig, + RecoveryConfig, RestartWebServerRequest, + SaveSnapshotRequest, + SaveSnapshotResponse, + ScheduledSnapshotsConfig, SoftwareConfig, UpdateEnvironmentRequest, WebServerConfig, @@ -56,6 +67,7 @@ "ImageVersionsAsyncClient", "CheckUpgradeRequest", "CheckUpgradeResponse", + "CloudDataLineageIntegration", "CreateEnvironmentRequest", "DatabaseConfig", "DeleteEnvironmentRequest", @@ -63,6 +75,7 @@ "Environment", "EnvironmentConfig", "EnvironmentsClient", + "ExecuteAirflowCommandResponse", "GetEnvironmentRequest", "IPAllocationPolicy", "ImageVersion", @@ -71,12 +84,21 @@ "ListEnvironmentsResponse", "ListImageVersionsRequest", "ListImageVersionsResponse", + "LoadSnapshotRequest", + "LoadSnapshotResponse", "MaintenanceWindow", + "MasterAuthorizedNetworksConfig", + "NetworkingConfig", "NodeConfig", "OperationMetadata", + "PollAirflowCommandResponse", "PrivateClusterConfig", "PrivateEnvironmentConfig", + "RecoveryConfig", "RestartWebServerRequest", + "SaveSnapshotRequest", + "SaveSnapshotResponse", + "ScheduledSnapshotsConfig", "SoftwareConfig", "UpdateEnvironmentRequest", "WebServerConfig", diff --git a/google/cloud/orchestration/airflow/service_v1beta1/gapic_metadata.json b/google/cloud/orchestration/airflow/service_v1beta1/gapic_metadata.json index 3899a07..69f39fb 100644 --- a/google/cloud/orchestration/airflow/service_v1beta1/gapic_metadata.json +++ b/google/cloud/orchestration/airflow/service_v1beta1/gapic_metadata.json @@ -35,11 +35,21 @@ "list_environments" ] }, + "LoadSnapshot": { + "methods": [ + "load_snapshot" + ] + }, "RestartWebServer": { "methods": [ "restart_web_server" ] }, + "SaveSnapshot": { + "methods": [ + "save_snapshot" + ] + }, "UpdateEnvironment": { "methods": [ "update_environment" @@ -75,11 +85,21 @@ "list_environments" ] }, + "LoadSnapshot": { + "methods": [ + "load_snapshot" + ] + }, "RestartWebServer": { "methods": [ "restart_web_server" ] }, + "SaveSnapshot": { + "methods": [ + "save_snapshot" + ] + }, "UpdateEnvironment": { "methods": [ "update_environment" diff --git 
a/google/cloud/orchestration/airflow/service_v1beta1/services/environments/async_client.py b/google/cloud/orchestration/airflow/service_v1beta1/services/environments/async_client.py index 05b0464..66540ce 100644 --- a/google/cloud/orchestration/airflow/service_v1beta1/services/environments/async_client.py +++ b/google/cloud/orchestration/airflow/service_v1beta1/services/environments/async_client.py @@ -736,7 +736,11 @@ async def sample_update_environment(): - Horizontally scale the number of nodes in the environment. An integer greater than or equal to 3 must be provided in the ``config.nodeCount`` - field. \* ``config.webServerNetworkAccessControl`` + field. Supported for Cloud Composer environments + in versions composer-1.\ *.*-airflow-*.*.*. + + - ``config.webServerNetworkAccessControl`` + - Replace the environment's current WebServerNetworkAccessControl. @@ -767,9 +771,7 @@ async def sample_update_environment(): - Replace all environment variables. If a replacement environment variable map is not included in ``environment``, all custom - environment variables are cleared. It is an error - to provide both this mask and a mask specifying - one or more individual environment variables. + environment variables are cleared. - ``config.softwareConfig.imageVersion`` @@ -777,11 +779,11 @@ async def sample_update_environment(): Refer to ``SoftwareConfig.image_version`` for information on how to format the new image version. Additionally, the new image version - cannot effect a version downgrade and must match - the current image version's Composer major version - and Airflow major and minor versions. Consult the - `Cloud Composer Version - List `__ + cannot effect a version downgrade, and must match + the current image version's Composer and Airflow + major versions. Consult the `Cloud Composer + version + list `__ for valid values. - ``config.softwareConfig.schedulerCount`` @@ -789,21 +791,52 @@ async def sample_update_environment(): - Horizontally scale the number of schedulers in Airflow. A positive integer not greater than the number of nodes must be provided in the - ``config.softwareConfig.schedulerCount`` field. \* - ``config.databaseConfig.machineType`` + ``config.softwareConfig.schedulerCount`` field. + Supported for Cloud Composer environments in + versions composer-1.\ *.*-airflow-2.*.*. + + - ``config.softwareConfig.cloudDataLineageIntegration`` + + - Configuration for Cloud Data Lineage integration. + + - ``config.databaseConfig.machineType`` + - Cloud SQL machine type used by Airflow database. It has to be one of: db-n1-standard-2, db-n1-standard-4, db-n1-standard-8 or - db-n1-standard-16. \* - ``config.webServerConfig.machineType`` + db-n1-standard-16. Supported for Cloud Composer + environments in versions + composer-1.\ *.*-airflow-*.*.*. + + - ``config.webServerConfig.machineType`` + - Machine type on which Airflow web server is running. It has to be one of: composer-n1-webserver-2, composer-n1-webserver-4 - or composer-n1-webserver-8. \* - ``config.maintenanceWindow`` + or composer-n1-webserver-8. Supported for Cloud + Composer environments in versions + composer-1.\ *.*-airflow-*.*.*. + + - ``config.maintenanceWindow`` + - Maintenance window during which Cloud Composer components may be under maintenance. + - ``config.workloadsConfig`` + + - The workloads configuration settings for the GKE + cluster associated with the Cloud Composer + environment. Supported for Cloud Composer + environments in versions + composer-2.\ *.*-airflow-*.*.\* and newer. 
+ + - ``config.environmentSize`` + + - The size of the Cloud Composer environment. + Supported for Cloud Composer environments in + versions composer-2.\ *.*-airflow-*.*.\* and + newer. + This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -1190,6 +1223,204 @@ async def sample_check_upgrade(): # Done; return the response. return response + async def save_snapshot( + self, + request: Optional[Union[environments.SaveSnapshotRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a snapshots of a Cloud Composer environment. + As a result of this operation, snapshot of environment's + state is stored in a location specified in the + SaveSnapshotRequest. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.orchestration.airflow import service_v1beta1 + + async def sample_save_snapshot(): + # Create a client + client = service_v1beta1.EnvironmentsAsyncClient() + + # Initialize request argument(s) + request = service_v1beta1.SaveSnapshotRequest( + ) + + # Make the request + operation = client.save_snapshot(request=request) + + print("Waiting for operation to complete...") + + response = await operation.result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.orchestration.airflow.service_v1beta1.types.SaveSnapshotRequest, dict]]): + The request object. Request to create a snapshot of a + Cloud Composer environment. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.orchestration.airflow.service_v1beta1.types.SaveSnapshotResponse` + Response to SaveSnapshotRequest. + + """ + # Create or coerce a protobuf request object. + request = environments.SaveSnapshotRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.save_snapshot, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("environment", request.environment),) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + environments.SaveSnapshotResponse, + metadata_type=operations.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + async def load_snapshot( + self, + request: Optional[Union[environments.LoadSnapshotRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Loads a snapshot of a Cloud Composer environment. + As a result of this operation, a snapshot of + environment's specified in LoadSnapshotRequest is loaded + into the environment. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.orchestration.airflow import service_v1beta1 + + async def sample_load_snapshot(): + # Create a client + client = service_v1beta1.EnvironmentsAsyncClient() + + # Initialize request argument(s) + request = service_v1beta1.LoadSnapshotRequest( + ) + + # Make the request + operation = client.load_snapshot(request=request) + + print("Waiting for operation to complete...") + + response = await operation.result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.orchestration.airflow.service_v1beta1.types.LoadSnapshotRequest, dict]]): + The request object. Request to load a snapshot into a + Cloud Composer environment. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.orchestration.airflow.service_v1beta1.types.LoadSnapshotResponse` + Response to LoadSnapshotRequest. + + """ + # Create or coerce a protobuf request object. + request = environments.LoadSnapshotRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.load_snapshot, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("environment", request.environment),) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + environments.LoadSnapshotResponse, + metadata_type=operations.OperationMetadata, + ) + + # Done; return the response. 
+ return response + async def __aenter__(self): return self diff --git a/google/cloud/orchestration/airflow/service_v1beta1/services/environments/client.py b/google/cloud/orchestration/airflow/service_v1beta1/services/environments/client.py index fad5038..149d3ac 100644 --- a/google/cloud/orchestration/airflow/service_v1beta1/services/environments/client.py +++ b/google/cloud/orchestration/airflow/service_v1beta1/services/environments/client.py @@ -967,7 +967,11 @@ def sample_update_environment(): - Horizontally scale the number of nodes in the environment. An integer greater than or equal to 3 must be provided in the ``config.nodeCount`` - field. \* ``config.webServerNetworkAccessControl`` + field. Supported for Cloud Composer environments + in versions composer-1.\ *.*-airflow-*.*.*. + + - ``config.webServerNetworkAccessControl`` + - Replace the environment's current WebServerNetworkAccessControl. @@ -998,9 +1002,7 @@ def sample_update_environment(): - Replace all environment variables. If a replacement environment variable map is not included in ``environment``, all custom - environment variables are cleared. It is an error - to provide both this mask and a mask specifying - one or more individual environment variables. + environment variables are cleared. - ``config.softwareConfig.imageVersion`` @@ -1008,11 +1010,11 @@ def sample_update_environment(): Refer to ``SoftwareConfig.image_version`` for information on how to format the new image version. Additionally, the new image version - cannot effect a version downgrade and must match - the current image version's Composer major version - and Airflow major and minor versions. Consult the - `Cloud Composer Version - List `__ + cannot effect a version downgrade, and must match + the current image version's Composer and Airflow + major versions. Consult the `Cloud Composer + version + list `__ for valid values. - ``config.softwareConfig.schedulerCount`` @@ -1020,21 +1022,52 @@ def sample_update_environment(): - Horizontally scale the number of schedulers in Airflow. A positive integer not greater than the number of nodes must be provided in the - ``config.softwareConfig.schedulerCount`` field. \* - ``config.databaseConfig.machineType`` + ``config.softwareConfig.schedulerCount`` field. + Supported for Cloud Composer environments in + versions composer-1.\ *.*-airflow-2.*.*. + + - ``config.softwareConfig.cloudDataLineageIntegration`` + + - Configuration for Cloud Data Lineage integration. + + - ``config.databaseConfig.machineType`` + - Cloud SQL machine type used by Airflow database. It has to be one of: db-n1-standard-2, db-n1-standard-4, db-n1-standard-8 or - db-n1-standard-16. \* - ``config.webServerConfig.machineType`` + db-n1-standard-16. Supported for Cloud Composer + environments in versions + composer-1.\ *.*-airflow-*.*.*. + + - ``config.webServerConfig.machineType`` + - Machine type on which Airflow web server is running. It has to be one of: composer-n1-webserver-2, composer-n1-webserver-4 - or composer-n1-webserver-8. \* - ``config.maintenanceWindow`` + or composer-n1-webserver-8. Supported for Cloud + Composer environments in versions + composer-1.\ *.*-airflow-*.*.*. + + - ``config.maintenanceWindow`` + - Maintenance window during which Cloud Composer components may be under maintenance. + - ``config.workloadsConfig`` + + - The workloads configuration settings for the GKE + cluster associated with the Cloud Composer + environment. 
Supported for Cloud Composer + environments in versions + composer-2.\ *.*-airflow-*.*.\* and newer. + + - ``config.environmentSize`` + + - The size of the Cloud Composer environment. + Supported for Cloud Composer environments in + versions composer-2.\ *.*-airflow-*.*.\* and + newer. + This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -1423,6 +1456,206 @@ def sample_check_upgrade(): # Done; return the response. return response + def save_snapshot( + self, + request: Optional[Union[environments.SaveSnapshotRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates a snapshots of a Cloud Composer environment. + As a result of this operation, snapshot of environment's + state is stored in a location specified in the + SaveSnapshotRequest. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.orchestration.airflow import service_v1beta1 + + def sample_save_snapshot(): + # Create a client + client = service_v1beta1.EnvironmentsClient() + + # Initialize request argument(s) + request = service_v1beta1.SaveSnapshotRequest( + ) + + # Make the request + operation = client.save_snapshot(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.orchestration.airflow.service_v1beta1.types.SaveSnapshotRequest, dict]): + The request object. Request to create a snapshot of a + Cloud Composer environment. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.orchestration.airflow.service_v1beta1.types.SaveSnapshotResponse` + Response to SaveSnapshotRequest. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a environments.SaveSnapshotRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, environments.SaveSnapshotRequest): + request = environments.SaveSnapshotRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.save_snapshot] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("environment", request.environment),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. 
+ response = operation.from_gapic( + response, + self._transport.operations_client, + environments.SaveSnapshotResponse, + metadata_type=operations.OperationMetadata, + ) + + # Done; return the response. + return response + + def load_snapshot( + self, + request: Optional[Union[environments.LoadSnapshotRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Loads a snapshot of a Cloud Composer environment. + As a result of this operation, a snapshot of + environment's specified in LoadSnapshotRequest is loaded + into the environment. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.orchestration.airflow import service_v1beta1 + + def sample_load_snapshot(): + # Create a client + client = service_v1beta1.EnvironmentsClient() + + # Initialize request argument(s) + request = service_v1beta1.LoadSnapshotRequest( + ) + + # Make the request + operation = client.load_snapshot(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.orchestration.airflow.service_v1beta1.types.LoadSnapshotRequest, dict]): + The request object. Request to load a snapshot into a + Cloud Composer environment. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.orchestration.airflow.service_v1beta1.types.LoadSnapshotResponse` + Response to LoadSnapshotRequest. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a environments.LoadSnapshotRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, environments.LoadSnapshotRequest): + request = environments.LoadSnapshotRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.load_snapshot] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("environment", request.environment),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + environments.LoadSnapshotResponse, + metadata_type=operations.OperationMetadata, + ) + + # Done; return the response. 
+ return response + def __enter__(self): return self diff --git a/google/cloud/orchestration/airflow/service_v1beta1/services/environments/transports/base.py b/google/cloud/orchestration/airflow/service_v1beta1/services/environments/transports/base.py index fa7092c..e28de0c 100644 --- a/google/cloud/orchestration/airflow/service_v1beta1/services/environments/transports/base.py +++ b/google/cloud/orchestration/airflow/service_v1beta1/services/environments/transports/base.py @@ -163,6 +163,16 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.save_snapshot: gapic_v1.method.wrap_method( + self.save_snapshot, + default_timeout=None, + client_info=client_info, + ), + self.load_snapshot: gapic_v1.method.wrap_method( + self.load_snapshot, + default_timeout=None, + client_info=client_info, + ), } def close(self): @@ -245,6 +255,24 @@ def check_upgrade( ]: raise NotImplementedError() + @property + def save_snapshot( + self, + ) -> Callable[ + [environments.SaveSnapshotRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def load_snapshot( + self, + ) -> Callable[ + [environments.LoadSnapshotRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + @property def kind(self) -> str: raise NotImplementedError() diff --git a/google/cloud/orchestration/airflow/service_v1beta1/services/environments/transports/grpc.py b/google/cloud/orchestration/airflow/service_v1beta1/services/environments/transports/grpc.py index cef66e2..1fe5d26 100644 --- a/google/cloud/orchestration/airflow/service_v1beta1/services/environments/transports/grpc.py +++ b/google/cloud/orchestration/airflow/service_v1beta1/services/environments/transports/grpc.py @@ -431,6 +431,64 @@ def check_upgrade( ) return self._stubs["check_upgrade"] + @property + def save_snapshot( + self, + ) -> Callable[[environments.SaveSnapshotRequest], operations_pb2.Operation]: + r"""Return a callable for the save snapshot method over gRPC. + + Creates a snapshots of a Cloud Composer environment. + As a result of this operation, snapshot of environment's + state is stored in a location specified in the + SaveSnapshotRequest. + + Returns: + Callable[[~.SaveSnapshotRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "save_snapshot" not in self._stubs: + self._stubs["save_snapshot"] = self.grpc_channel.unary_unary( + "/google.cloud.orchestration.airflow.service.v1beta1.Environments/SaveSnapshot", + request_serializer=environments.SaveSnapshotRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["save_snapshot"] + + @property + def load_snapshot( + self, + ) -> Callable[[environments.LoadSnapshotRequest], operations_pb2.Operation]: + r"""Return a callable for the load snapshot method over gRPC. + + Loads a snapshot of a Cloud Composer environment. + As a result of this operation, a snapshot of + environment's specified in LoadSnapshotRequest is loaded + into the environment. + + Returns: + Callable[[~.LoadSnapshotRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "load_snapshot" not in self._stubs: + self._stubs["load_snapshot"] = self.grpc_channel.unary_unary( + "/google.cloud.orchestration.airflow.service.v1beta1.Environments/LoadSnapshot", + request_serializer=environments.LoadSnapshotRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["load_snapshot"] + def close(self): self.grpc_channel.close() diff --git a/google/cloud/orchestration/airflow/service_v1beta1/services/environments/transports/grpc_asyncio.py b/google/cloud/orchestration/airflow/service_v1beta1/services/environments/transports/grpc_asyncio.py index a3ac7d7..4b3a149 100644 --- a/google/cloud/orchestration/airflow/service_v1beta1/services/environments/transports/grpc_asyncio.py +++ b/google/cloud/orchestration/airflow/service_v1beta1/services/environments/transports/grpc_asyncio.py @@ -449,6 +449,68 @@ def check_upgrade( ) return self._stubs["check_upgrade"] + @property + def save_snapshot( + self, + ) -> Callable[ + [environments.SaveSnapshotRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the save snapshot method over gRPC. + + Creates a snapshots of a Cloud Composer environment. + As a result of this operation, snapshot of environment's + state is stored in a location specified in the + SaveSnapshotRequest. + + Returns: + Callable[[~.SaveSnapshotRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "save_snapshot" not in self._stubs: + self._stubs["save_snapshot"] = self.grpc_channel.unary_unary( + "/google.cloud.orchestration.airflow.service.v1beta1.Environments/SaveSnapshot", + request_serializer=environments.SaveSnapshotRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["save_snapshot"] + + @property + def load_snapshot( + self, + ) -> Callable[ + [environments.LoadSnapshotRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the load snapshot method over gRPC. + + Loads a snapshot of a Cloud Composer environment. + As a result of this operation, a snapshot of + environment's specified in LoadSnapshotRequest is loaded + into the environment. + + Returns: + Callable[[~.LoadSnapshotRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "load_snapshot" not in self._stubs: + self._stubs["load_snapshot"] = self.grpc_channel.unary_unary( + "/google.cloud.orchestration.airflow.service.v1beta1.Environments/LoadSnapshot", + request_serializer=environments.LoadSnapshotRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["load_snapshot"] + def close(self): return self.grpc_channel.close() diff --git a/google/cloud/orchestration/airflow/service_v1beta1/types/__init__.py b/google/cloud/orchestration/airflow/service_v1beta1/types/__init__.py index 6492ba4..d23643d 100644 --- a/google/cloud/orchestration/airflow/service_v1beta1/types/__init__.py +++ b/google/cloud/orchestration/airflow/service_v1beta1/types/__init__.py @@ -16,21 +16,32 @@ from .environments import ( CheckUpgradeRequest, CheckUpgradeResponse, + CloudDataLineageIntegration, CreateEnvironmentRequest, DatabaseConfig, DeleteEnvironmentRequest, EncryptionConfig, Environment, EnvironmentConfig, + ExecuteAirflowCommandResponse, GetEnvironmentRequest, IPAllocationPolicy, ListEnvironmentsRequest, ListEnvironmentsResponse, + LoadSnapshotRequest, + LoadSnapshotResponse, MaintenanceWindow, + MasterAuthorizedNetworksConfig, + NetworkingConfig, NodeConfig, + PollAirflowCommandResponse, PrivateClusterConfig, PrivateEnvironmentConfig, + RecoveryConfig, RestartWebServerRequest, + SaveSnapshotRequest, + SaveSnapshotResponse, + ScheduledSnapshotsConfig, SoftwareConfig, UpdateEnvironmentRequest, WebServerConfig, @@ -47,21 +58,32 @@ __all__ = ( "CheckUpgradeRequest", "CheckUpgradeResponse", + "CloudDataLineageIntegration", "CreateEnvironmentRequest", "DatabaseConfig", "DeleteEnvironmentRequest", "EncryptionConfig", "Environment", "EnvironmentConfig", + "ExecuteAirflowCommandResponse", "GetEnvironmentRequest", "IPAllocationPolicy", "ListEnvironmentsRequest", "ListEnvironmentsResponse", + "LoadSnapshotRequest", + "LoadSnapshotResponse", "MaintenanceWindow", + "MasterAuthorizedNetworksConfig", + "NetworkingConfig", "NodeConfig", + "PollAirflowCommandResponse", "PrivateClusterConfig", "PrivateEnvironmentConfig", + "RecoveryConfig", "RestartWebServerRequest", + "SaveSnapshotRequest", + "SaveSnapshotResponse", + "ScheduledSnapshotsConfig", "SoftwareConfig", "UpdateEnvironmentRequest", "WebServerConfig", diff --git a/google/cloud/orchestration/airflow/service_v1beta1/types/environments.py b/google/cloud/orchestration/airflow/service_v1beta1/types/environments.py index c5f3293..fadb9ad 100644 --- a/google/cloud/orchestration/airflow/service_v1beta1/types/environments.py +++ b/google/cloud/orchestration/airflow/service_v1beta1/types/environments.py @@ -29,18 +29,29 @@ "DeleteEnvironmentRequest", "UpdateEnvironmentRequest", "RestartWebServerRequest", + "ExecuteAirflowCommandResponse", + "PollAirflowCommandResponse", + "SaveSnapshotRequest", + "SaveSnapshotResponse", + "LoadSnapshotRequest", + "LoadSnapshotResponse", "EnvironmentConfig", "WebServerNetworkAccessControl", "SoftwareConfig", "IPAllocationPolicy", "NodeConfig", "PrivateClusterConfig", + "NetworkingConfig", "PrivateEnvironmentConfig", "DatabaseConfig", "WebServerConfig", "EncryptionConfig", "MaintenanceWindow", "WorkloadsConfig", + "RecoveryConfig", + "ScheduledSnapshotsConfig", + "MasterAuthorizedNetworksConfig", + "CloudDataLineageIntegration", "Environment", "CheckUpgradeRequest", "CheckUpgradeResponse", @@ -279,8 +290,12 @@ class UpdateEnvironmentRequest(proto.Message): - Horizontally scale the number of nodes in the environment. 
An integer greater than or equal to 3 - must be provided in the ``config.nodeCount`` field. \* - ``config.webServerNetworkAccessControl`` + must be provided in the ``config.nodeCount`` field. + Supported for Cloud Composer environments in versions + composer-1.\ *.*-airflow-*.*.*. + + - ``config.webServerNetworkAccessControl`` + - Replace the environment's current WebServerNetworkAccessControl. @@ -308,20 +323,18 @@ class UpdateEnvironmentRequest(proto.Message): - Replace all environment variables. If a replacement environment variable map is not included in ``environment``, all custom environment variables are - cleared. It is an error to provide both this mask and - a mask specifying one or more individual environment - variables. + cleared. - ``config.softwareConfig.imageVersion`` - Upgrade the version of the environment in-place. Refer to ``SoftwareConfig.image_version`` for information on how to format the new image version. Additionally, the - new image version cannot effect a version downgrade + new image version cannot effect a version downgrade, and must match the current image version's Composer - major version and Airflow major and minor versions. - Consult the `Cloud Composer Version - List `__ + and Airflow major versions. Consult the `Cloud + Composer version + list `__ for valid values. - ``config.softwareConfig.schedulerCount`` @@ -329,18 +342,47 @@ class UpdateEnvironmentRequest(proto.Message): - Horizontally scale the number of schedulers in Airflow. A positive integer not greater than the number of nodes must be provided in the - ``config.softwareConfig.schedulerCount`` field. \* - ``config.databaseConfig.machineType`` + ``config.softwareConfig.schedulerCount`` field. + Supported for Cloud Composer environments in versions + composer-1.\ *.*-airflow-2.*.*. + + - ``config.softwareConfig.cloudDataLineageIntegration`` + + - Configuration for Cloud Data Lineage integration. + + - ``config.databaseConfig.machineType`` + - Cloud SQL machine type used by Airflow database. It has to be one of: db-n1-standard-2, db-n1-standard-4, - db-n1-standard-8 or db-n1-standard-16. \* - ``config.webServerConfig.machineType`` + db-n1-standard-8 or db-n1-standard-16. Supported for + Cloud Composer environments in versions + composer-1.\ *.*-airflow-*.*.*. + + - ``config.webServerConfig.machineType`` + - Machine type on which Airflow web server is running. It has to be one of: composer-n1-webserver-2, - composer-n1-webserver-4 or composer-n1-webserver-8. \* - ``config.maintenanceWindow`` + composer-n1-webserver-4 or composer-n1-webserver-8. + Supported for Cloud Composer environments in versions + composer-1.\ *.*-airflow-*.*.*. + + - ``config.maintenanceWindow`` + - Maintenance window during which Cloud Composer components may be under maintenance. + + - ``config.workloadsConfig`` + + - The workloads configuration settings for the GKE + cluster associated with the Cloud Composer + environment. Supported for Cloud Composer environments + in versions composer-2.\ *.*-airflow-*.*.\* and newer. + + - ``config.environmentSize`` + + - The size of the Cloud Composer environment. Supported + for Cloud Composer environments in versions + composer-2.\ *.*-airflow-*.*.\* and newer. """ name: str = proto.Field( @@ -375,6 +417,207 @@ class RestartWebServerRequest(proto.Message): ) +class ExecuteAirflowCommandResponse(proto.Message): + r"""Response to ExecuteAirflowCommandRequest. + + Attributes: + execution_id (str): + The unique ID of the command execution for + polling. 
+ pod (str): + The name of the pod where the command is + executed. + pod_namespace (str): + The namespace of the pod where the command is + executed. + error (str): + Error message. Empty if there was no error. + """ + + execution_id: str = proto.Field( + proto.STRING, + number=1, + ) + pod: str = proto.Field( + proto.STRING, + number=2, + ) + pod_namespace: str = proto.Field( + proto.STRING, + number=3, + ) + error: str = proto.Field( + proto.STRING, + number=4, + ) + + +class PollAirflowCommandResponse(proto.Message): + r"""Response to PollAirflowCommandRequest. + + Attributes: + output (MutableSequence[google.cloud.orchestration.airflow.service_v1beta1.types.PollAirflowCommandResponse.Line]): + Output from the command execution. It may not + contain the full output and the caller may need + to poll for more lines. + output_end (bool): + Whether the command execution has finished + and there is no more output. + exit_info (google.cloud.orchestration.airflow.service_v1beta1.types.PollAirflowCommandResponse.ExitInfo): + The result exit status of the command. + """ + + class Line(proto.Message): + r"""Contains information about a single line from logs. + + Attributes: + line_number (int): + Number of the line. + content (str): + Text content of the log line. + """ + + line_number: int = proto.Field( + proto.INT32, + number=1, + ) + content: str = proto.Field( + proto.STRING, + number=2, + ) + + class ExitInfo(proto.Message): + r"""Information about how a command ended. + + Attributes: + exit_code (int): + The exit code from the command execution. + error (str): + Error message. Empty if there was no error. + """ + + exit_code: int = proto.Field( + proto.INT32, + number=1, + ) + error: str = proto.Field( + proto.STRING, + number=2, + ) + + output: MutableSequence[Line] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=Line, + ) + output_end: bool = proto.Field( + proto.BOOL, + number=2, + ) + exit_info: ExitInfo = proto.Field( + proto.MESSAGE, + number=3, + message=ExitInfo, + ) + + +class SaveSnapshotRequest(proto.Message): + r"""Request to create a snapshot of a Cloud Composer environment. + + Attributes: + environment (str): + The resource name of the source environment + in the form: + "projects/{projectId}/locations/{locationId}/environments/{environmentId}". + snapshot_location (str): + Location in a Cloud Storage where the + snapshot is going to be stored, e.g.: + "gs://my-bucket/snapshots". + """ + + environment: str = proto.Field( + proto.STRING, + number=1, + ) + snapshot_location: str = proto.Field( + proto.STRING, + number=2, + ) + + +class SaveSnapshotResponse(proto.Message): + r"""Response to SaveSnapshotRequest. + + Attributes: + snapshot_path (str): + The fully-resolved Cloud Storage path of the created + snapshot, e.g.: + "gs://my-bucket/snapshots/project_location_environment_timestamp". + This field is populated only if the snapshot creation was + successful. + """ + + snapshot_path: str = proto.Field( + proto.STRING, + number=1, + ) + + +class LoadSnapshotRequest(proto.Message): + r"""Request to load a snapshot into a Cloud Composer environment. + + Attributes: + environment (str): + The resource name of the target environment + in the form: + "projects/{projectId}/locations/{locationId}/environments/{environmentId}". + snapshot_path (str): + A Cloud Storage path to a snapshot to load, e.g.: + "gs://my-bucket/snapshots/project_location_environment_timestamp". 
+ skip_pypi_packages_installation (bool): + Whether or not to skip installing Pypi + packages when loading the environment's state. + skip_environment_variables_setting (bool): + Whether or not to skip setting environment + variables when loading the environment's state. + skip_airflow_overrides_setting (bool): + Whether or not to skip setting Airflow + overrides when loading the environment's state. + skip_gcs_data_copying (bool): + Whether or not to skip copying Cloud Storage + data when loading the environment's state. + """ + + environment: str = proto.Field( + proto.STRING, + number=1, + ) + snapshot_path: str = proto.Field( + proto.STRING, + number=2, + ) + skip_pypi_packages_installation: bool = proto.Field( + proto.BOOL, + number=3, + ) + skip_environment_variables_setting: bool = proto.Field( + proto.BOOL, + number=4, + ) + skip_airflow_overrides_setting: bool = proto.Field( + proto.BOOL, + number=5, + ) + skip_gcs_data_copying: bool = proto.Field( + proto.BOOL, + number=6, + ) + + +class LoadSnapshotResponse(proto.Message): + r"""Response to LoadSnapshotRequest.""" + + class EnvironmentConfig(proto.Message): r"""Configuration information for an environment. @@ -391,9 +634,11 @@ class EnvironmentConfig(proto.Message): for this environment reside in a simulated directory with the given prefix. node_count (int): - The number of nodes in the Kubernetes Engine - cluster that will be used to run this - environment. + The number of nodes in the Kubernetes Engine cluster that + will be used to run this environment. + + This field is supported for Cloud Composer environments in + versions composer-1.\ *.*-airflow-*.*.*. software_config (google.cloud.orchestration.airflow.service_v1beta1.types.SoftwareConfig): The configuration settings for software inside the environment. @@ -413,8 +658,11 @@ class EnvironmentConfig(proto.Message): Cloud SQL instance used internally by Apache Airflow software. web_server_config (google.cloud.orchestration.airflow.service_v1beta1.types.WebServerConfig): - Optional. The configuration settings for the - Airflow web server App Engine instance. + Optional. The configuration settings for the Airflow web + server App Engine instance. + + This field is supported for Cloud Composer environments in + versions composer-1.\ *.*-airflow-*.*.*. airflow_uri (str): Output only. The URI of the Apache Airflow Web UI hosted within this environment (see `Airflow web @@ -454,6 +702,19 @@ class EnvironmentConfig(proto.Message): environment_size (google.cloud.orchestration.airflow.service_v1beta1.types.EnvironmentConfig.EnvironmentSize): Optional. The size of the Cloud Composer environment. + This field is supported for Cloud Composer environments in + versions composer-2.\ *.*-airflow-*.*.\* and newer. + master_authorized_networks_config (google.cloud.orchestration.airflow.service_v1beta1.types.MasterAuthorizedNetworksConfig): + Optional. The configuration options for GKE + cluster master authorized networks. By default + master authorized networks feature is: - in case + of private environment: enabled with no external + networks allowlisted. + - in case of public environment: disabled. + recovery_config (google.cloud.orchestration.airflow.service_v1beta1.types.RecoveryConfig): + Optional. The Recovery settings configuration of an + environment. + This field is supported for Cloud Composer environments in versions composer-2.\ *.*-airflow-*.*.\* and newer. 
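With the request and response messages above in place, the new ``save_snapshot`` and ``load_snapshot`` methods added to the v1beta1 clients earlier in this change can be driven end to end. A hedged sketch; the project, location, environment and bucket names are placeholders:

.. code-block:: python

    from google.cloud.orchestration.airflow import service_v1beta1

    client = service_v1beta1.EnvironmentsClient()
    env_name = (  # placeholder resource name
        "projects/my-project/locations/us-central1/environments/my-environment"
    )

    # Save the environment's state to a snapshot stored in Cloud Storage.
    save_op = client.save_snapshot(
        request=service_v1beta1.SaveSnapshotRequest(
            environment=env_name,
            snapshot_location="gs://my-bucket/snapshots",
        )
    )
    snapshot_path = save_op.result().snapshot_path

    # Later, load that snapshot back into the environment.
    load_op = client.load_snapshot(
        request=service_v1beta1.LoadSnapshotRequest(
            environment=env_name,
            snapshot_path=snapshot_path,
            skip_pypi_packages_installation=False,
        )
    )
    load_op.result()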
""" @@ -531,6 +792,16 @@ class EnvironmentSize(proto.Enum): number=16, enum=EnvironmentSize, ) + master_authorized_networks_config: "MasterAuthorizedNetworksConfig" = proto.Field( + proto.MESSAGE, + number=17, + message="MasterAuthorizedNetworksConfig", + ) + recovery_config: "RecoveryConfig" = proto.Field( + proto.MESSAGE, + number=18, + message="RecoveryConfig", + ) class WebServerNetworkAccessControl(proto.Message): @@ -588,25 +859,32 @@ class SoftwareConfig(proto.Message): encapsulates both the version of Cloud Composer functionality and the version of Apache Airflow. It must match the regular expression - ``composer-([0-9]+\.[0-9]+\.[0-9]+|latest)-airflow-[0-9]+\.[0-9]+(\.[0-9]+.*)?``. + ``composer-([0-9]+(\.[0-9]+\.[0-9]+(-preview\.[0-9]+)?)?|latest)-airflow-([0-9]+(\.[0-9]+(\.[0-9]+)?)?)``. When used as input, the server also checks if the provided version is supported and denies the request for an unsupported version. - The Cloud Composer portion of the version is a `semantic - version `__ or ``latest``. When the - patch version is omitted, the current Cloud Composer patch - version is selected. When ``latest`` is provided instead of - an explicit version number, the server replaces ``latest`` - with the current Cloud Composer version and stores that - version number in the same field. - - The portion of the image version that follows *airflow-* is - an official Apache Airflow repository `release - name `__. - - See also `Version - List `__. + The Cloud Composer portion of the image version is a full + `semantic version `__, or an alias in + the form of major version number or ``latest``. When an + alias is provided, the server replaces it with the current + Cloud Composer version that satisfies the alias. + + The Apache Airflow portion of the image version is a full + semantic version that points to one of the supported Apache + Airflow versions, or an alias in the form of only major or + major.minor versions specified. When an alias is provided, + the server replaces it with the latest Apache Airflow + version that satisfies the alias and is supported in the + given Cloud Composer version. + + In all cases, the resolved image version is stored in the + same field. + + See also `version + list `__ + and `versioning + overview `__. airflow_config_overrides (MutableMapping[str, str]): Optional. Apache Airflow configuration properties to override. @@ -660,11 +938,23 @@ class SoftwareConfig(proto.Message): - ``SQL_REGION`` - ``SQL_USER`` python_version (str): - Optional. The major version of Python used to - run the Apache Airflow scheduler, worker, and - webserver processes. - Can be set to '2' or '3'. If not specified, the - default is '3'. Cannot be updated. + Optional. The major version of Python used to run the Apache + Airflow scheduler, worker, and webserver processes. + + Can be set to '2' or '3'. If not specified, the default is + '3'. Cannot be updated. + + This field is only supported for Cloud Composer environments + in versions composer-1.\ *.*-airflow-*.*.*. Environments in + newer versions always use Python major version 3. + scheduler_count (int): + Optional. The number of schedulers for Airflow. + + This field is supported for Cloud Composer environments in + versions composer-1.\ *.*-airflow-2.*.*. + cloud_data_lineage_integration (google.cloud.orchestration.airflow.service_v1beta1.types.CloudDataLineageIntegration): + Optional. The configuration for Cloud Data + Lineage integration. 
""" image_version: str = proto.Field( @@ -690,6 +980,15 @@ class SoftwareConfig(proto.Message): proto.STRING, number=6, ) + scheduler_count: int = proto.Field( + proto.INT32, + number=7, + ) + cloud_data_lineage_integration: "CloudDataLineageIntegration" = proto.Field( + proto.MESSAGE, + number=8, + message="CloudDataLineageIntegration", + ) class IPAllocationPolicy(proto.Message): @@ -700,28 +999,35 @@ class IPAllocationPolicy(proto.Message): use_ip_aliases (bool): Optional. Whether or not to enable Alias IPs in the GKE cluster. If ``true``, a VPC-native cluster is created. + + This field is only supported for Cloud Composer environments + in versions composer-1.\ *.*-airflow-*.*.*. Environments in + newer versions always use VPC-native GKE clusters. cluster_secondary_range_name (str): Optional. The name of the cluster's secondary range used to allocate IP addresses to pods. Specify either ``cluster_secondary_range_name`` or ``cluster_ipv4_cidr_block`` but not both. - This field is applicable only when ``use_ip_aliases`` is - true. + For Cloud Composer environments in versions + composer-1.\ *.*-airflow-*.*.*, this field is applicable + only when ``use_ip_aliases`` is true. services_secondary_range_name (str): Optional. The name of the services' secondary range used to allocate IP addresses to the cluster. Specify either ``services_secondary_range_name`` or ``services_ipv4_cidr_block`` but not both. - This field is applicable only when ``use_ip_aliases`` is - true. + For Cloud Composer environments in versions + composer-1.\ *.*-airflow-*.*.*, this field is applicable + only when ``use_ip_aliases`` is true. cluster_ipv4_cidr_block (str): Optional. The IP address range used to allocate IP addresses to pods in the cluster. - This field is applicable only when ``use_ip_aliases`` is - true. + For Cloud Composer environments in versions + composer-1.\ *.*-airflow-*.*.*, this field is applicable + only when ``use_ip_aliases`` is true. Set to blank to have GKE choose a range with the default size. @@ -730,7 +1036,7 @@ class IPAllocationPolicy(proto.Message): with a specific netmask. Set to a - `CIDR `__ + `CIDR `__ notation (e.g. ``10.96.0.0/14``) from the RFC-1918 private networks (e.g. ``10.0.0.0/8``, ``172.16.0.0/12``, ``192.168.0.0/16``) to pick a specific range to use. Specify @@ -740,8 +1046,9 @@ class IPAllocationPolicy(proto.Message): Optional. The IP address range of the services IP addresses in this cluster. - This field is applicable only when ``use_ip_aliases`` is - true. + For Cloud Composer environments in versions + composer-1.\ *.*-airflow-*.*.*, this field is applicable + only when ``use_ip_aliases`` is true. Set to blank to have GKE choose a range with the default size. @@ -750,7 +1057,7 @@ class IPAllocationPolicy(proto.Message): with a specific netmask. Set to a - `CIDR `__ + `CIDR `__ notation (e.g. ``10.96.0.0/14``) from the RFC-1918 private networks (e.g. ``10.0.0.0/8``, ``172.16.0.0/12``, ``192.168.0.0/16``) to pick a specific range to use. Specify @@ -803,6 +1110,9 @@ class NodeConfig(proto.Message): one field (``location`` or ``nodeConfig.machineType``) is specified, the location information from the specified field will be propagated to the unspecified field. + + This field is supported for Cloud Composer environments in + versions composer-1.\ *.*-airflow-*.*.*. machine_type (str): Optional. 
The Compute Engine `machine type `__ used for cluster @@ -827,6 +1137,9 @@ class NodeConfig(proto.Message): If this field is unspecified, the ``machineTypeId`` defaults to "n1-standard-1". + + This field is supported for Cloud Composer environments in + versions composer-1.\ *.*-airflow-*.*.*. network (str): Optional. The Compute Engine network to be used for machine communications, specified as a `relative resource @@ -851,14 +1164,20 @@ class NodeConfig(proto.Message): also be provided, and the subnetwork must belong to the enclosing environment's project and location. disk_size_gb (int): - Optional. The disk size in GB used for node - VMs. Minimum size is 20GB. If unspecified, - defaults to 100GB. Cannot be updated. + Optional. The disk size in GB used for node VMs. Minimum + size is 30GB. If unspecified, defaults to 100GB. Cannot be + updated. + + This field is supported for Cloud Composer environments in + versions composer-1.\ *.*-airflow-*.*.*. oauth_scopes (MutableSequence[str]): Optional. The set of Google API scopes to be made available on all node VMs. If ``oauth_scopes`` is empty, defaults to ["https://www.googleapis.com/auth/cloud-platform"]. Cannot be updated. + + This field is supported for Cloud Composer environments in + versions composer-1.\ *.*-airflow-*.*.*. service_account (str): Optional. The Google Cloud Platform Service Account to be used by the workloads. If a @@ -871,6 +1190,9 @@ class NodeConfig(proto.Message): network firewalls. Each tag within the list must comply with `RFC1035 `__. Cannot be updated. + + This field is supported for Cloud Composer environments in + versions composer-1.\ *.*-airflow-*.*.*. ip_allocation_policy (google.cloud.orchestration.airflow.service_v1beta1.types.IPAllocationPolicy): Optional. The IPAllocationPolicy fields for the GKE cluster. @@ -886,6 +1208,17 @@ class NodeConfig(proto.Message): more information, see [Optimizing IP address allocation] (https://cloud.google.com/kubernetes-engine/docs/how-to/flexible-pod-cidr). Cannot be updated. + + This field is supported for Cloud Composer environments in + versions composer-1.\ *.*-airflow-*.*.*. + enable_ip_masq_agent (bool): + Optional. Deploys 'ip-masq-agent' daemon set + in the GKE cluster and defines + nonMasqueradeCIDRs equals to pod IP range so IP + masquerading is used for all destination + addresses, except between pods traffic. + See: + https://cloud.google.com/kubernetes-engine/docs/how-to/ip-masquerade-agent """ location: str = proto.Field( @@ -929,6 +1262,10 @@ class NodeConfig(proto.Message): proto.INT32, number=10, ) + enable_ip_masq_agent: bool = proto.Field( + proto.BOOL, + number=11, + ) class PrivateClusterConfig(proto.Message): @@ -968,6 +1305,34 @@ class PrivateClusterConfig(proto.Message): ) +class NetworkingConfig(proto.Message): + r"""Configuration options for networking connections in the + Composer 2 environment. + + Attributes: + connection_type (google.cloud.orchestration.airflow.service_v1beta1.types.NetworkingConfig.ConnectionType): + Optional. Indicates the user requested + specifc connection type between Tenant and + Customer projects. You cannot set networking + connection type in public IP environment. + """ + + class ConnectionType(proto.Enum): + r"""Represents connection type between Composer environment in + Customer Project and the corresponding Tenant project, from a + predefined list of available connection modes. 
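The ConnectionType enum above, together with the Private Service Connect fields described for PrivateEnvironmentConfig, can be combined as follows. A hedged sketch for the v1beta1 surface; the subnetwork path is a placeholder:

.. code-block:: python

    from google.cloud.orchestration.airflow import service_v1beta1

    # Request Private Service Connect (instead of VPC peerings) for the
    # connection to Cloud SQL in the tenant project.
    private_config = service_v1beta1.PrivateEnvironmentConfig(
        enable_private_environment=True,
        cloud_composer_connection_subnetwork=(
            "projects/my-project/regions/us-central1/subnetworks/my-subnet"
        ),
        networking_config=service_v1beta1.NetworkingConfig(
            connection_type=(
                service_v1beta1.NetworkingConfig.ConnectionType.PRIVATE_SERVICE_CONNECT
            )
        ),
    )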
+ """ + CONNECTION_TYPE_UNSPECIFIED = 0 + VPC_PEERING = 1 + PRIVATE_SERVICE_CONNECT = 2 + + connection_type: ConnectionType = proto.Field( + proto.ENUM, + number=1, + enum=ConnectionType, + ) + + class PrivateEnvironmentConfig(proto.Message): r"""The configuration information for configuring a Private IP Cloud Composer environment. @@ -976,7 +1341,9 @@ class PrivateEnvironmentConfig(proto.Message): enable_private_environment (bool): Optional. If ``true``, a Private IP Cloud Composer environment is created. If this field is set to true, - ``IPAllocationPolicy.use_ip_aliases`` must be set to true . + ``IPAllocationPolicy.use_ip_aliases`` must be set to true + for Cloud Composer environments in versions + composer-1.\ *.*-airflow-*.*.*. private_cluster_config (google.cloud.orchestration.airflow.service_v1beta1.types.PrivateClusterConfig): Optional. Configuration for the private GKE cluster for a Private IP Cloud Composer @@ -986,13 +1353,19 @@ class PrivateEnvironmentConfig(proto.Message): will be reserved. Needs to be disjoint from private_cluster_config.master_ipv4_cidr_block and cloud_sql_ipv4_cidr_block. + + This field is supported for Cloud Composer environments in + versions composer-1.\ *.*-airflow-*.*.*. cloud_sql_ipv4_cidr_block (str): Optional. The CIDR block from which IP range in tenant project will be reserved for Cloud SQL. Needs to be disjoint from web_server_ipv4_cidr_block web_server_ipv4_reserved_range (str): - Output only. The IP range reserved for the - tenant project's App Engine VMs. + Output only. The IP range reserved for the tenant project's + App Engine VMs. + + This field is supported for Cloud Composer environments in + versions composer-1.\ *.*-airflow-*.*.*. cloud_composer_network_ipv4_cidr_block (str): Optional. The CIDR block from which IP range for Cloud Composer Network in tenant project will be reserved. Needs @@ -1008,6 +1381,21 @@ class PrivateEnvironmentConfig(proto.Message): This field is supported for Cloud Composer environments in versions composer-2.\ *.*-airflow-*.*.\* and newer. + enable_privately_used_public_ips (bool): + Optional. When enabled, IPs from public (non-RFC1918) ranges + can be used for + ``IPAllocationPolicy.cluster_ipv4_cidr_block`` and + ``IPAllocationPolicy.service_ipv4_cidr_block``. + cloud_composer_connection_subnetwork (str): + Optional. When specified, the environment + will use Private Service Connect instead of VPC + peerings to connect to Cloud SQL in the Tenant + Project, and the PSC endpoint in the Customer + Project will use an IP address from this + subnetwork. + networking_config (google.cloud.orchestration.airflow.service_v1beta1.types.NetworkingConfig): + Optional. Configuration for the network + connections configuration in the environment. """ enable_private_environment: bool = proto.Field( @@ -1039,6 +1427,19 @@ class PrivateEnvironmentConfig(proto.Message): proto.STRING, number=8, ) + enable_privately_used_public_ips: bool = proto.Field( + proto.BOOL, + number=6, + ) + cloud_composer_connection_subnetwork: str = proto.Field( + proto.STRING, + number=9, + ) + networking_config: "NetworkingConfig" = proto.Field( + proto.MESSAGE, + number=10, + message="NetworkingConfig", + ) class DatabaseConfig(proto.Message): @@ -1047,11 +1448,11 @@ class DatabaseConfig(proto.Message): Attributes: machine_type (str): - Optional. Cloud SQL machine type used by - Airflow database. It has to be one of: - db-n1-standard-2, db-n1-standard-4, - db-n1-standard-8 or db-n1-standard-16. If not - specified, db-n1-standard-2 will be used. 
+ Optional. Cloud SQL machine type used by Airflow database. + It has to be one of: db-n1-standard-2, db-n1-standard-4, + db-n1-standard-8 or db-n1-standard-16. If not specified, + db-n1-standard-2 will be used. Supported for Cloud Composer + environments in versions composer-1.\ *.*-airflow-*.*.*. """ machine_type: str = proto.Field( @@ -1061,8 +1462,9 @@ class DatabaseConfig(proto.Message): class WebServerConfig(proto.Message): - r"""The configuration settings for the Airflow web server App - Engine instance. + r"""The configuration settings for the Airflow web server App Engine + instance. Supported for Cloud Composer environments in versions + composer-1.\ *.*-airflow-*.*.*. Attributes: machine_type (str): @@ -1083,8 +1485,9 @@ class WebServerConfig(proto.Message): class EncryptionConfig(proto.Message): - r"""The encryption options for the Cloud Composer environment and - its dependencies. + r"""The encryption options for the Cloud Composer environment and its + dependencies. Supported for Cloud Composer environments in versions + composer-1.\ *.*-airflow-*.*.*. Attributes: kms_key_name (str): @@ -1162,6 +1565,9 @@ class WorkloadsConfig(proto.Message): server. worker (google.cloud.orchestration.airflow.service_v1beta1.types.WorkloadsConfig.WorkerResource): Optional. Resources used by Airflow workers. + triggerer (google.cloud.orchestration.airflow.service_v1beta1.types.WorkloadsConfig.TriggererResource): + Optional. Resources used by Airflow + triggerers. """ class SchedulerResource(proto.Message): @@ -1268,6 +1674,33 @@ class WorkerResource(proto.Message): number=5, ) + class TriggererResource(proto.Message): + r"""Configuration for resources used by Airflow triggerers. + + Attributes: + count (int): + Optional. The number of triggerers. + cpu (float): + Optional. CPU request and limit for a single + Airflow triggerer replica. + memory_gb (float): + Optional. Memory (GB) request and limit for a + single Airflow triggerer replica. + """ + + count: int = proto.Field( + proto.INT32, + number=1, + ) + cpu: float = proto.Field( + proto.FLOAT, + number=2, + ) + memory_gb: float = proto.Field( + proto.FLOAT, + number=3, + ) + scheduler: SchedulerResource = proto.Field( proto.MESSAGE, number=1, @@ -1283,6 +1716,128 @@ class WorkerResource(proto.Message): number=3, message=WorkerResource, ) + triggerer: TriggererResource = proto.Field( + proto.MESSAGE, + number=4, + message=TriggererResource, + ) + + +class RecoveryConfig(proto.Message): + r"""The Recovery settings of an environment. + + Attributes: + scheduled_snapshots_config (google.cloud.orchestration.airflow.service_v1beta1.types.ScheduledSnapshotsConfig): + Optional. The configuration for scheduled + snapshot creation mechanism. + """ + + scheduled_snapshots_config: "ScheduledSnapshotsConfig" = proto.Field( + proto.MESSAGE, + number=1, + message="ScheduledSnapshotsConfig", + ) + + +class ScheduledSnapshotsConfig(proto.Message): + r"""The configuration for scheduled snapshot creation mechanism. + + Attributes: + enabled (bool): + Optional. Whether scheduled snapshots + creation is enabled. + snapshot_location (str): + Optional. The Cloud Storage location for + storing automatically created snapshots. + snapshot_creation_schedule (str): + Optional. The cron expression representing + the time when snapshots creation mechanism runs. + This field is subject to additional validation + around frequency of execution. + time_zone (str): + Optional. Time zone that sets the context to interpret + snapshot_creation_schedule. 
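
For illustration, a minimal sketch of enabling scheduled snapshots through ``RecoveryConfig``, assuming the v1beta1 package re-exports these types at the version level; the bucket path, cron schedule, and time zone are placeholder values only:

.. code-block:: python

    from google.cloud.orchestration.airflow import service_v1beta1

    # Hypothetical values; the snapshot location is a Cloud Storage path and
    # the creation schedule is a cron expression interpreted in time_zone.
    recovery = service_v1beta1.RecoveryConfig(
        scheduled_snapshots_config=service_v1beta1.ScheduledSnapshotsConfig(
            enabled=True,
            snapshot_location="gs://example-bucket/snapshots",
            snapshot_creation_schedule="0 4 * * *",
            time_zone="UTC",
        ),
    )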
+ """ + + enabled: bool = proto.Field( + proto.BOOL, + number=1, + ) + snapshot_location: str = proto.Field( + proto.STRING, + number=6, + ) + snapshot_creation_schedule: str = proto.Field( + proto.STRING, + number=3, + ) + time_zone: str = proto.Field( + proto.STRING, + number=5, + ) + + +class MasterAuthorizedNetworksConfig(proto.Message): + r"""Configuration options for the master authorized networks + feature. Enabled master authorized networks will disallow all + external traffic to access Kubernetes master through HTTPS + except traffic from the given CIDR blocks, Google Compute Engine + Public IPs and Google Prod IPs. + + Attributes: + enabled (bool): + Whether or not master authorized networks + feature is enabled. + cidr_blocks (MutableSequence[google.cloud.orchestration.airflow.service_v1beta1.types.MasterAuthorizedNetworksConfig.CidrBlock]): + Up to 50 external networks that could access + Kubernetes master through HTTPS. + """ + + class CidrBlock(proto.Message): + r"""CIDR block with an optional name. + + Attributes: + display_name (str): + User-defined name that identifies the CIDR + block. + cidr_block (str): + CIDR block that must be specified in CIDR + notation. + """ + + display_name: str = proto.Field( + proto.STRING, + number=1, + ) + cidr_block: str = proto.Field( + proto.STRING, + number=2, + ) + + enabled: bool = proto.Field( + proto.BOOL, + number=1, + ) + cidr_blocks: MutableSequence[CidrBlock] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=CidrBlock, + ) + + +class CloudDataLineageIntegration(proto.Message): + r"""Configuration for Cloud Data Lineage integration. + + Attributes: + enabled (bool): + Optional. Whether or not Cloud Data Lineage + integration is enabled. + """ + + enabled: bool = proto.Field( + proto.BOOL, + number=1, + ) class Environment(proto.Message): @@ -1383,25 +1938,32 @@ class CheckUpgradeRequest(proto.Message): encapsulates both the version of Cloud Composer functionality and the version of Apache Airflow. It must match the regular expression - ``composer-([0-9]+\.[0-9]+\.[0-9]+|latest)-airflow-[0-9]+\.[0-9]+(\.[0-9]+.*)?``. + ``composer-([0-9]+(\.[0-9]+\.[0-9]+(-preview\.[0-9]+)?)?|latest)-airflow-([0-9]+(\.[0-9]+(\.[0-9]+)?)?)``. When used as input, the server also checks if the provided version is supported and denies the request for an unsupported version. - The Cloud Composer portion of the version is a `semantic - version `__ or ``latest``. When the - patch version is omitted, the current Cloud Composer patch - version is selected. When ``latest`` is provided instead of - an explicit version number, the server replaces ``latest`` - with the current Cloud Composer version and stores that - version number in the same field. - - The portion of the image version that follows ``airflow-`` - is an official Apache Airflow repository `release - name `__. - - See also [Version List] - (/composer/docs/concepts/versioning/composer-versions). + The Cloud Composer portion of the image version is a full + `semantic version `__, or an alias in + the form of major version number or ``latest``. When an + alias is provided, the server replaces it with the current + Cloud Composer version that satisfies the alias. + + The Apache Airflow portion of the image version is a full + semantic version that points to one of the supported Apache + Airflow versions, or an alias in the form of only major or + major.minor versions specified. 
When an alias is provided, + the server replaces it with the latest Apache Airflow + version that satisfies the alias and is supported in the + given Cloud Composer version. + + In all cases, the resolved image version is stored in the + same field. + + See also `version + list `__ + and `versioning + overview `__. """ environment: str = proto.Field( diff --git a/google/cloud/orchestration/airflow/service_v1beta1/types/image_versions.py b/google/cloud/orchestration/airflow/service_v1beta1/types/image_versions.py index c23f704..557e3b5 100644 --- a/google/cloud/orchestration/airflow/service_v1beta1/types/image_versions.py +++ b/google/cloud/orchestration/airflow/service_v1beta1/types/image_versions.py @@ -97,7 +97,7 @@ class ImageVersion(proto.Message): Attributes: image_version_id (str): The string identifier of the ImageVersion, in - the form: "composer-x.y.z-airflow-a.b(.c)". + the form: "composer-x.y.z-airflow-a.b.c". is_default (bool): Whether this is the default ImageVersion used by Composer during environment creation if no diff --git a/google/cloud/orchestration/airflow/service_v1beta1/types/operations.py b/google/cloud/orchestration/airflow/service_v1beta1/types/operations.py index 2f1bd5c..269b715 100644 --- a/google/cloud/orchestration/airflow/service_v1beta1/types/operations.py +++ b/google/cloud/orchestration/airflow/service_v1beta1/types/operations.py @@ -67,6 +67,8 @@ class Type(proto.Enum): DELETE = 2 UPDATE = 3 CHECK = 4 + SAVE_SNAPSHOT = 5 + LOAD_SNAPSHOT = 6 state: State = proto.Field( proto.ENUM, diff --git a/samples/generated_samples/composer_v1_generated_environments_load_snapshot_async.py b/samples/generated_samples/composer_v1_generated_environments_load_snapshot_async.py new file mode 100644 index 0000000..b7776e0 --- /dev/null +++ b/samples/generated_samples/composer_v1_generated_environments_load_snapshot_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for LoadSnapshot +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-orchestration-airflow + + +# [START composer_v1_generated_Environments_LoadSnapshot_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.orchestration.airflow import service_v1 + + +async def sample_load_snapshot(): + # Create a client + client = service_v1.EnvironmentsAsyncClient() + + # Initialize request argument(s) + request = service_v1.LoadSnapshotRequest( + ) + + # Make the request + operation = client.load_snapshot(request=request) + + print("Waiting for operation to complete...") + + response = await operation.result() + + # Handle the response + print(response) + +# [END composer_v1_generated_Environments_LoadSnapshot_async] diff --git a/samples/generated_samples/composer_v1_generated_environments_load_snapshot_sync.py b/samples/generated_samples/composer_v1_generated_environments_load_snapshot_sync.py new file mode 100644 index 0000000..9d32838 --- /dev/null +++ b/samples/generated_samples/composer_v1_generated_environments_load_snapshot_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for LoadSnapshot +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-orchestration-airflow + + +# [START composer_v1_generated_Environments_LoadSnapshot_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.orchestration.airflow import service_v1 + + +def sample_load_snapshot(): + # Create a client + client = service_v1.EnvironmentsClient() + + # Initialize request argument(s) + request = service_v1.LoadSnapshotRequest( + ) + + # Make the request + operation = client.load_snapshot(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END composer_v1_generated_Environments_LoadSnapshot_sync] diff --git a/samples/generated_samples/composer_v1_generated_environments_save_snapshot_async.py b/samples/generated_samples/composer_v1_generated_environments_save_snapshot_async.py new file mode 100644 index 0000000..ec387fe --- /dev/null +++ b/samples/generated_samples/composer_v1_generated_environments_save_snapshot_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SaveSnapshot +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-orchestration-airflow + + +# [START composer_v1_generated_Environments_SaveSnapshot_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.orchestration.airflow import service_v1 + + +async def sample_save_snapshot(): + # Create a client + client = service_v1.EnvironmentsAsyncClient() + + # Initialize request argument(s) + request = service_v1.SaveSnapshotRequest( + ) + + # Make the request + operation = client.save_snapshot(request=request) + + print("Waiting for operation to complete...") + + response = await operation.result() + + # Handle the response + print(response) + +# [END composer_v1_generated_Environments_SaveSnapshot_async] diff --git a/samples/generated_samples/composer_v1_generated_environments_save_snapshot_sync.py b/samples/generated_samples/composer_v1_generated_environments_save_snapshot_sync.py new file mode 100644 index 0000000..09882bf --- /dev/null +++ b/samples/generated_samples/composer_v1_generated_environments_save_snapshot_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SaveSnapshot +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-orchestration-airflow + + +# [START composer_v1_generated_Environments_SaveSnapshot_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.orchestration.airflow import service_v1 + + +def sample_save_snapshot(): + # Create a client + client = service_v1.EnvironmentsClient() + + # Initialize request argument(s) + request = service_v1.SaveSnapshotRequest( + ) + + # Make the request + operation = client.save_snapshot(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END composer_v1_generated_Environments_SaveSnapshot_sync] diff --git a/samples/generated_samples/composer_v1beta1_generated_environments_load_snapshot_async.py b/samples/generated_samples/composer_v1beta1_generated_environments_load_snapshot_async.py new file mode 100644 index 0000000..fbd0dae --- /dev/null +++ b/samples/generated_samples/composer_v1beta1_generated_environments_load_snapshot_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for LoadSnapshot +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-orchestration-airflow-service + + +# [START composer_v1beta1_generated_Environments_LoadSnapshot_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.orchestration.airflow import service_v1beta1 + + +async def sample_load_snapshot(): + # Create a client + client = service_v1beta1.EnvironmentsAsyncClient() + + # Initialize request argument(s) + request = service_v1beta1.LoadSnapshotRequest( + ) + + # Make the request + operation = client.load_snapshot(request=request) + + print("Waiting for operation to complete...") + + response = await operation.result() + + # Handle the response + print(response) + +# [END composer_v1beta1_generated_Environments_LoadSnapshot_async] diff --git a/samples/generated_samples/composer_v1beta1_generated_environments_load_snapshot_sync.py b/samples/generated_samples/composer_v1beta1_generated_environments_load_snapshot_sync.py new file mode 100644 index 0000000..6424bbd --- /dev/null +++ b/samples/generated_samples/composer_v1beta1_generated_environments_load_snapshot_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for LoadSnapshot +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-orchestration-airflow-service + + +# [START composer_v1beta1_generated_Environments_LoadSnapshot_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.orchestration.airflow import service_v1beta1 + + +def sample_load_snapshot(): + # Create a client + client = service_v1beta1.EnvironmentsClient() + + # Initialize request argument(s) + request = service_v1beta1.LoadSnapshotRequest( + ) + + # Make the request + operation = client.load_snapshot(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END composer_v1beta1_generated_Environments_LoadSnapshot_sync] diff --git a/samples/generated_samples/composer_v1beta1_generated_environments_save_snapshot_async.py b/samples/generated_samples/composer_v1beta1_generated_environments_save_snapshot_async.py new file mode 100644 index 0000000..de0f02a --- /dev/null +++ b/samples/generated_samples/composer_v1beta1_generated_environments_save_snapshot_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SaveSnapshot +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-orchestration-airflow-service + + +# [START composer_v1beta1_generated_Environments_SaveSnapshot_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.orchestration.airflow import service_v1beta1 + + +async def sample_save_snapshot(): + # Create a client + client = service_v1beta1.EnvironmentsAsyncClient() + + # Initialize request argument(s) + request = service_v1beta1.SaveSnapshotRequest( + ) + + # Make the request + operation = client.save_snapshot(request=request) + + print("Waiting for operation to complete...") + + response = await operation.result() + + # Handle the response + print(response) + +# [END composer_v1beta1_generated_Environments_SaveSnapshot_async] diff --git a/samples/generated_samples/composer_v1beta1_generated_environments_save_snapshot_sync.py b/samples/generated_samples/composer_v1beta1_generated_environments_save_snapshot_sync.py new file mode 100644 index 0000000..88e01f0 --- /dev/null +++ b/samples/generated_samples/composer_v1beta1_generated_environments_save_snapshot_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SaveSnapshot +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-orchestration-airflow-service + + +# [START composer_v1beta1_generated_Environments_SaveSnapshot_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.orchestration.airflow import service_v1beta1 + + +def sample_save_snapshot(): + # Create a client + client = service_v1beta1.EnvironmentsClient() + + # Initialize request argument(s) + request = service_v1beta1.SaveSnapshotRequest( + ) + + # Make the request + operation = client.save_snapshot(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END composer_v1beta1_generated_Environments_SaveSnapshot_sync] diff --git a/samples/generated_samples/snippet_metadata_google.cloud.orchestration.airflow.service.v1.json b/samples/generated_samples/snippet_metadata_google.cloud.orchestration.airflow.service.v1.json index 5ebdced..afcb961 100644 --- a/samples/generated_samples/snippet_metadata_google.cloud.orchestration.airflow.service.v1.json +++ b/samples/generated_samples/snippet_metadata_google.cloud.orchestration.airflow.service.v1.json @@ -663,6 +663,312 @@ ], "title": "composer_v1_generated_environments_list_environments_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.orchestration.airflow.service_v1.EnvironmentsAsyncClient", + "shortName": "EnvironmentsAsyncClient" + }, + "fullName": "google.cloud.orchestration.airflow.service_v1.EnvironmentsAsyncClient.load_snapshot", + "method": { + "fullName": "google.cloud.orchestration.airflow.service.v1.Environments.LoadSnapshot", + "service": { + "fullName": "google.cloud.orchestration.airflow.service.v1.Environments", + "shortName": "Environments" + }, + "shortName": "LoadSnapshot" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.orchestration.airflow.service_v1.types.LoadSnapshotRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "load_snapshot" + }, + "description": "Sample for LoadSnapshot", + "file": "composer_v1_generated_environments_load_snapshot_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "composer_v1_generated_Environments_LoadSnapshot_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "composer_v1_generated_environments_load_snapshot_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.orchestration.airflow.service_v1.EnvironmentsClient", + "shortName": "EnvironmentsClient" + }, + "fullName": "google.cloud.orchestration.airflow.service_v1.EnvironmentsClient.load_snapshot", + "method": { + "fullName": "google.cloud.orchestration.airflow.service.v1.Environments.LoadSnapshot", + "service": { + "fullName": "google.cloud.orchestration.airflow.service.v1.Environments", + "shortName": "Environments" + }, + "shortName": "LoadSnapshot" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.orchestration.airflow.service_v1.types.LoadSnapshotRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "load_snapshot" + }, + "description": "Sample for LoadSnapshot", + "file": "composer_v1_generated_environments_load_snapshot_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "composer_v1_generated_Environments_LoadSnapshot_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "composer_v1_generated_environments_load_snapshot_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.orchestration.airflow.service_v1.EnvironmentsAsyncClient", + "shortName": "EnvironmentsAsyncClient" + }, + "fullName": "google.cloud.orchestration.airflow.service_v1.EnvironmentsAsyncClient.save_snapshot", + "method": { + "fullName": "google.cloud.orchestration.airflow.service.v1.Environments.SaveSnapshot", + "service": { + "fullName": "google.cloud.orchestration.airflow.service.v1.Environments", + "shortName": "Environments" + }, + "shortName": "SaveSnapshot" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.orchestration.airflow.service_v1.types.SaveSnapshotRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "save_snapshot" + }, + "description": "Sample for SaveSnapshot", + "file": "composer_v1_generated_environments_save_snapshot_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "composer_v1_generated_Environments_SaveSnapshot_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "composer_v1_generated_environments_save_snapshot_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.orchestration.airflow.service_v1.EnvironmentsClient", + "shortName": "EnvironmentsClient" + }, + "fullName": "google.cloud.orchestration.airflow.service_v1.EnvironmentsClient.save_snapshot", + "method": { + "fullName": "google.cloud.orchestration.airflow.service.v1.Environments.SaveSnapshot", + "service": { + "fullName": "google.cloud.orchestration.airflow.service.v1.Environments", + "shortName": "Environments" + }, + "shortName": "SaveSnapshot" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.orchestration.airflow.service_v1.types.SaveSnapshotRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": 
"timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "save_snapshot" + }, + "description": "Sample for SaveSnapshot", + "file": "composer_v1_generated_environments_save_snapshot_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "composer_v1_generated_Environments_SaveSnapshot_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "composer_v1_generated_environments_save_snapshot_sync.py" + }, { "canonical": true, "clientMethod": { diff --git a/samples/generated_samples/snippet_metadata_google.cloud.orchestration.airflow.service.v1beta1.json b/samples/generated_samples/snippet_metadata_google.cloud.orchestration.airflow.service.v1beta1.json index b13cbb3..5ba2ed3 100644 --- a/samples/generated_samples/snippet_metadata_google.cloud.orchestration.airflow.service.v1beta1.json +++ b/samples/generated_samples/snippet_metadata_google.cloud.orchestration.airflow.service.v1beta1.json @@ -816,6 +816,159 @@ ], "title": "composer_v1beta1_generated_environments_list_environments_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.orchestration.airflow.service_v1beta1.EnvironmentsAsyncClient", + "shortName": "EnvironmentsAsyncClient" + }, + "fullName": "google.cloud.orchestration.airflow.service_v1beta1.EnvironmentsAsyncClient.load_snapshot", + "method": { + "fullName": "google.cloud.orchestration.airflow.service.v1beta1.Environments.LoadSnapshot", + "service": { + "fullName": "google.cloud.orchestration.airflow.service.v1beta1.Environments", + "shortName": "Environments" + }, + "shortName": "LoadSnapshot" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.orchestration.airflow.service_v1beta1.types.LoadSnapshotRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "load_snapshot" + }, + "description": "Sample for LoadSnapshot", + "file": "composer_v1beta1_generated_environments_load_snapshot_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "composer_v1beta1_generated_Environments_LoadSnapshot_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "composer_v1beta1_generated_environments_load_snapshot_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.orchestration.airflow.service_v1beta1.EnvironmentsClient", + "shortName": "EnvironmentsClient" + }, + "fullName": 
"google.cloud.orchestration.airflow.service_v1beta1.EnvironmentsClient.load_snapshot", + "method": { + "fullName": "google.cloud.orchestration.airflow.service.v1beta1.Environments.LoadSnapshot", + "service": { + "fullName": "google.cloud.orchestration.airflow.service.v1beta1.Environments", + "shortName": "Environments" + }, + "shortName": "LoadSnapshot" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.orchestration.airflow.service_v1beta1.types.LoadSnapshotRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "load_snapshot" + }, + "description": "Sample for LoadSnapshot", + "file": "composer_v1beta1_generated_environments_load_snapshot_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "composer_v1beta1_generated_Environments_LoadSnapshot_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "composer_v1beta1_generated_environments_load_snapshot_sync.py" + }, { "canonical": true, "clientMethod": { @@ -969,6 +1122,159 @@ ], "title": "composer_v1beta1_generated_environments_restart_web_server_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.orchestration.airflow.service_v1beta1.EnvironmentsAsyncClient", + "shortName": "EnvironmentsAsyncClient" + }, + "fullName": "google.cloud.orchestration.airflow.service_v1beta1.EnvironmentsAsyncClient.save_snapshot", + "method": { + "fullName": "google.cloud.orchestration.airflow.service.v1beta1.Environments.SaveSnapshot", + "service": { + "fullName": "google.cloud.orchestration.airflow.service.v1beta1.Environments", + "shortName": "Environments" + }, + "shortName": "SaveSnapshot" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.orchestration.airflow.service_v1beta1.types.SaveSnapshotRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "save_snapshot" + }, + "description": "Sample for SaveSnapshot", + "file": "composer_v1beta1_generated_environments_save_snapshot_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "composer_v1beta1_generated_Environments_SaveSnapshot_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "composer_v1beta1_generated_environments_save_snapshot_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": 
"google.cloud.orchestration.airflow.service_v1beta1.EnvironmentsClient", + "shortName": "EnvironmentsClient" + }, + "fullName": "google.cloud.orchestration.airflow.service_v1beta1.EnvironmentsClient.save_snapshot", + "method": { + "fullName": "google.cloud.orchestration.airflow.service.v1beta1.Environments.SaveSnapshot", + "service": { + "fullName": "google.cloud.orchestration.airflow.service.v1beta1.Environments", + "shortName": "Environments" + }, + "shortName": "SaveSnapshot" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.orchestration.airflow.service_v1beta1.types.SaveSnapshotRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "save_snapshot" + }, + "description": "Sample for SaveSnapshot", + "file": "composer_v1beta1_generated_environments_save_snapshot_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "composer_v1beta1_generated_Environments_SaveSnapshot_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "composer_v1beta1_generated_environments_save_snapshot_sync.py" + }, { "canonical": true, "clientMethod": { diff --git a/scripts/fixup_service_v1_keywords.py b/scripts/fixup_service_v1_keywords.py index bdacbaf..31a0db1 100644 --- a/scripts/fixup_service_v1_keywords.py +++ b/scripts/fixup_service_v1_keywords.py @@ -44,6 +44,8 @@ class serviceCallTransformer(cst.CSTTransformer): 'get_environment': ('name', ), 'list_environments': ('parent', 'page_size', 'page_token', ), 'list_image_versions': ('parent', 'page_size', 'page_token', 'include_past_releases', ), + 'load_snapshot': ('environment', 'snapshot_path', 'skip_pypi_packages_installation', 'skip_environment_variables_setting', 'skip_airflow_overrides_setting', 'skip_gcs_data_copying', ), + 'save_snapshot': ('environment', 'snapshot_location', ), 'update_environment': ('name', 'environment', 'update_mask', ), } diff --git a/scripts/fixup_service_v1beta1_keywords.py b/scripts/fixup_service_v1beta1_keywords.py index d070e52..a4ae0cb 100644 --- a/scripts/fixup_service_v1beta1_keywords.py +++ b/scripts/fixup_service_v1beta1_keywords.py @@ -45,7 +45,9 @@ class serviceCallTransformer(cst.CSTTransformer): 'get_environment': ('name', ), 'list_environments': ('parent', 'page_size', 'page_token', ), 'list_image_versions': ('parent', 'page_size', 'page_token', 'include_past_releases', ), + 'load_snapshot': ('environment', 'snapshot_path', 'skip_pypi_packages_installation', 'skip_environment_variables_setting', 'skip_airflow_overrides_setting', 'skip_gcs_data_copying', ), 'restart_web_server': ('name', ), + 'save_snapshot': ('environment', 'snapshot_location', ), 'update_environment': ('update_mask', 'name', 'environment', ), } diff --git a/tests/unit/gapic/service_v1/test_environments.py b/tests/unit/gapic/service_v1/test_environments.py index df22a3a..7926144 100644 --- a/tests/unit/gapic/service_v1/test_environments.py +++ b/tests/unit/gapic/service_v1/test_environments.py @@ -2112,6 +2112,294 @@ async def 
test_delete_environment_flattened_error_async(): ) +@pytest.mark.parametrize( + "request_type", + [ + environments.SaveSnapshotRequest, + dict, + ], +) +def test_save_snapshot(request_type, transport: str = "grpc"): + client = EnvironmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.save_snapshot), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.save_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == environments.SaveSnapshotRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_save_snapshot_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = EnvironmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.save_snapshot), "__call__") as call: + client.save_snapshot() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == environments.SaveSnapshotRequest() + + +@pytest.mark.asyncio +async def test_save_snapshot_async( + transport: str = "grpc_asyncio", request_type=environments.SaveSnapshotRequest +): + client = EnvironmentsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.save_snapshot), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.save_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == environments.SaveSnapshotRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_save_snapshot_async_from_dict(): + await test_save_snapshot_async(request_type=dict) + + +def test_save_snapshot_field_headers(): + client = EnvironmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = environments.SaveSnapshotRequest() + + request.environment = "environment_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.save_snapshot), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.save_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "environment=environment_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_save_snapshot_field_headers_async(): + client = EnvironmentsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = environments.SaveSnapshotRequest() + + request.environment = "environment_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.save_snapshot), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.save_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "environment=environment_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + environments.LoadSnapshotRequest, + dict, + ], +) +def test_load_snapshot(request_type, transport: str = "grpc"): + client = EnvironmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.load_snapshot), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.load_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == environments.LoadSnapshotRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_load_snapshot_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = EnvironmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.load_snapshot), "__call__") as call: + client.load_snapshot() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == environments.LoadSnapshotRequest() + + +@pytest.mark.asyncio +async def test_load_snapshot_async( + transport: str = "grpc_asyncio", request_type=environments.LoadSnapshotRequest +): + client = EnvironmentsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.load_snapshot), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.load_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == environments.LoadSnapshotRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_load_snapshot_async_from_dict(): + await test_load_snapshot_async(request_type=dict) + + +def test_load_snapshot_field_headers(): + client = EnvironmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = environments.LoadSnapshotRequest() + + request.environment = "environment_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.load_snapshot), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.load_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "environment=environment_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_load_snapshot_field_headers_async(): + client = EnvironmentsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = environments.LoadSnapshotRequest() + + request.environment = "environment_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.load_snapshot), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.load_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "environment=environment_value", + ) in kw["metadata"] + + def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. 
transport = transports.EnvironmentsGrpcTransport( @@ -2254,6 +2542,8 @@ def test_environments_base_transport(): "list_environments", "update_environment", "delete_environment", + "save_snapshot", + "load_snapshot", ) for method in methods: with pytest.raises(NotImplementedError): diff --git a/tests/unit/gapic/service_v1beta1/test_environments.py b/tests/unit/gapic/service_v1beta1/test_environments.py index b878086..74b036d 100644 --- a/tests/unit/gapic/service_v1beta1/test_environments.py +++ b/tests/unit/gapic/service_v1beta1/test_environments.py @@ -2413,6 +2413,294 @@ async def test_check_upgrade_field_headers_async(): ) in kw["metadata"] +@pytest.mark.parametrize( + "request_type", + [ + environments.SaveSnapshotRequest, + dict, + ], +) +def test_save_snapshot(request_type, transport: str = "grpc"): + client = EnvironmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.save_snapshot), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.save_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == environments.SaveSnapshotRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_save_snapshot_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = EnvironmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.save_snapshot), "__call__") as call: + client.save_snapshot() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == environments.SaveSnapshotRequest() + + +@pytest.mark.asyncio +async def test_save_snapshot_async( + transport: str = "grpc_asyncio", request_type=environments.SaveSnapshotRequest +): + client = EnvironmentsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.save_snapshot), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.save_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == environments.SaveSnapshotRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_save_snapshot_async_from_dict(): + await test_save_snapshot_async(request_type=dict) + + +def test_save_snapshot_field_headers(): + client = EnvironmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = environments.SaveSnapshotRequest() + + request.environment = "environment_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.save_snapshot), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.save_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "environment=environment_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_save_snapshot_field_headers_async(): + client = EnvironmentsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = environments.SaveSnapshotRequest() + + request.environment = "environment_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.save_snapshot), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.save_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "environment=environment_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + environments.LoadSnapshotRequest, + dict, + ], +) +def test_load_snapshot(request_type, transport: str = "grpc"): + client = EnvironmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.load_snapshot), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.load_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == environments.LoadSnapshotRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_load_snapshot_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = EnvironmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.load_snapshot), "__call__") as call: + client.load_snapshot() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == environments.LoadSnapshotRequest() + + +@pytest.mark.asyncio +async def test_load_snapshot_async( + transport: str = "grpc_asyncio", request_type=environments.LoadSnapshotRequest +): + client = EnvironmentsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.load_snapshot), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.load_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == environments.LoadSnapshotRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_load_snapshot_async_from_dict(): + await test_load_snapshot_async(request_type=dict) + + +def test_load_snapshot_field_headers(): + client = EnvironmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = environments.LoadSnapshotRequest() + + request.environment = "environment_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.load_snapshot), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.load_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "environment=environment_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_load_snapshot_field_headers_async(): + client = EnvironmentsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = environments.LoadSnapshotRequest() + + request.environment = "environment_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.load_snapshot), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.load_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "environment=environment_value", + ) in kw["metadata"] + + def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. transport = transports.EnvironmentsGrpcTransport( @@ -2557,6 +2845,8 @@ def test_environments_base_transport(): "delete_environment", "restart_web_server", "check_upgrade", + "save_snapshot", + "load_snapshot", ) for method in methods: with pytest.raises(NotImplementedError):
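
Note for reviewers: the tests above only verify routing and the operation-future return type, so here is a minimal usage sketch of the new ``save_snapshot`` / ``load_snapshot`` surface. It is illustrative only: the project/environment path is a placeholder, and the ``snapshot_location`` / ``snapshot_path`` field names are assumptions based on the snapshot request/response messages rather than anything shown in this diff (only the ``environment`` field appears in the tests).

.. code-block:: python

    # Hedged sketch, not part of this change: exercising the SaveSnapshot and
    # LoadSnapshot long-running operations from the v1 client surface.
    from google.cloud.orchestration.airflow import service_v1

    client = service_v1.EnvironmentsClient()
    env = "projects/my-project/locations/us-central1/environments/my-env"  # placeholder

    # Both RPCs return long-running operations (operations_pb2.Operation under
    # the hood, surfaced as google.api_core operation futures), so block on
    # .result() to wait for completion.
    save_op = client.save_snapshot(
        request=service_v1.SaveSnapshotRequest(
            environment=env,
            snapshot_location="gs://my-bucket/snapshots",  # assumed field name
        )
    )
    save_result = save_op.result()

    load_op = client.load_snapshot(
        request=service_v1.LoadSnapshotRequest(
            environment=env,
            snapshot_path=save_result.snapshot_path,  # assumed field name
        )
    )
    load_op.result()

As the tests assert, the contract being added is that both methods route through the transport's ``save_snapshot`` / ``load_snapshot`` stubs, attach the ``environment=...`` routing header, and return an operation future rather than a plain response.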