-    On January 1, 2020 this library will no longer support Python 2 on the latest released version.
-    Previously released library versions will continue to be available. For more information please
+    As of January 1, 2020 this library no longer supports Python 2 on the latest released version.
+    Library versions released prior to that date will continue to be available. For more information please
     visit Python 2 support on Google Cloud.
{% block body %} {% endblock %} diff --git a/docs/multiprocessing.rst b/docs/multiprocessing.rst deleted file mode 100644 index 1cb29d4c..00000000 --- a/docs/multiprocessing.rst +++ /dev/null @@ -1,7 +0,0 @@ -.. note:: - - Because this client uses :mod:`grpcio` library, it is safe to - share instances across threads. In multiprocessing scenarios, the best - practice is to create client instances *after* the invocation of - :func:`os.fork` by :class:`multiprocessing.Pool` or - :class:`multiprocessing.Process`. diff --git a/google/cloud/datastore_admin_v1/proto/datastore_admin_pb2.py b/google/cloud/datastore_admin_v1/proto/datastore_admin_pb2.py index f7c8d82e..f16463bb 100644 --- a/google/cloud/datastore_admin_v1/proto/datastore_admin_pb2.py +++ b/google/cloud/datastore_admin_v1/proto/datastore_admin_pb2.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! # source: google/cloud/datastore_admin_v1/proto/datastore_admin.proto - +"""Generated protocol buffer code.""" from google.protobuf.internal import enum_type_wrapper from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message diff --git a/google/cloud/datastore_admin_v1/proto/datastore_admin_pb2_grpc.py b/google/cloud/datastore_admin_v1/proto/datastore_admin_pb2_grpc.py index 54b53ea1..177889e1 100644 --- a/google/cloud/datastore_admin_v1/proto/datastore_admin_pb2_grpc.py +++ b/google/cloud/datastore_admin_v1/proto/datastore_admin_pb2_grpc.py @@ -1,4 +1,5 @@ # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +"""Client and server classes corresponding to protobuf-defined services.""" import grpc from google.cloud.datastore_admin_v1.proto import ( @@ -16,72 +17,72 @@ class DatastoreAdminStub(object): """Google Cloud Datastore Admin API - The Datastore Admin API provides several admin services for Cloud Datastore. + The Datastore Admin API provides several admin services for Cloud Datastore. - ----------------------------------------------------------------------------- - ## Concepts + ----------------------------------------------------------------------------- + ## Concepts - Project, namespace, kind, and entity as defined in the Google Cloud Datastore - API. + Project, namespace, kind, and entity as defined in the Google Cloud Datastore + API. - Operation: An Operation represents work being performed in the background. + Operation: An Operation represents work being performed in the background. - EntityFilter: Allows specifying a subset of entities in a project. This is - specified as a combination of kinds and namespaces (either or both of which - may be all). + EntityFilter: Allows specifying a subset of entities in a project. This is + specified as a combination of kinds and namespaces (either or both of which + may be all). - ----------------------------------------------------------------------------- - ## Services + ----------------------------------------------------------------------------- + ## Services - # Export/Import + # Export/Import - The Export/Import service provides the ability to copy all or a subset of - entities to/from Google Cloud Storage. + The Export/Import service provides the ability to copy all or a subset of + entities to/from Google Cloud Storage. - Exported data may be imported into Cloud Datastore for any Google Cloud - Platform project. It is not restricted to the export source project. It is - possible to export from one project and then import into another. 
+ Exported data may be imported into Cloud Datastore for any Google Cloud + Platform project. It is not restricted to the export source project. It is + possible to export from one project and then import into another. - Exported data can also be loaded into Google BigQuery for analysis. + Exported data can also be loaded into Google BigQuery for analysis. - Exports and imports are performed asynchronously. An Operation resource is - created for each export/import. The state (including any errors encountered) - of the export/import may be queried via the Operation resource. + Exports and imports are performed asynchronously. An Operation resource is + created for each export/import. The state (including any errors encountered) + of the export/import may be queried via the Operation resource. - # Index + # Index - The index service manages Cloud Datastore composite indexes. + The index service manages Cloud Datastore composite indexes. - Index creation and deletion are performed asynchronously. - An Operation resource is created for each such asynchronous operation. - The state of the operation (including any errors encountered) - may be queried via the Operation resource. + Index creation and deletion are performed asynchronously. + An Operation resource is created for each such asynchronous operation. + The state of the operation (including any errors encountered) + may be queried via the Operation resource. - # Operation + # Operation - The Operations collection provides a record of actions performed for the - specified project (including any operations in progress). Operations are not - created directly but through calls on other collections or resources. + The Operations collection provides a record of actions performed for the + specified project (including any operations in progress). Operations are not + created directly but through calls on other collections or resources. - An operation that is not yet done may be cancelled. The request to cancel is - asynchronous and the operation may continue to run for some time after the - request to cancel is made. + An operation that is not yet done may be cancelled. The request to cancel is + asynchronous and the operation may continue to run for some time after the + request to cancel is made. - An operation that is done may be deleted so that it is no longer listed as - part of the Operation collection. + An operation that is done may be deleted so that it is no longer listed as + part of the Operation collection. - ListOperations returns all pending operations, but not completed operations. + ListOperations returns all pending operations, but not completed operations. - Operations are created by service DatastoreAdmin, - but are accessed via service google.longrunning.Operations. - """ + Operations are created by service DatastoreAdmin, + but are accessed via service google.longrunning.Operations. + """ def __init__(self, channel): """Constructor. - Args: - channel: A grpc.Channel. - """ + Args: + channel: A grpc.Channel. + """ self.ExportEntities = channel.unary_unary( "/google.datastore.admin.v1.DatastoreAdmin/ExportEntities", request_serializer=google_dot_cloud_dot_datastore__admin__v1_dot_proto_dot_datastore__admin__pb2.ExportEntitiesRequest.SerializeToString, @@ -108,103 +109,103 @@ class DatastoreAdminServicer(object): """Google Cloud Datastore Admin API - The Datastore Admin API provides several admin services for Cloud Datastore. + The Datastore Admin API provides several admin services for Cloud Datastore. 
- ----------------------------------------------------------------------------- - ## Concepts + ----------------------------------------------------------------------------- + ## Concepts - Project, namespace, kind, and entity as defined in the Google Cloud Datastore - API. + Project, namespace, kind, and entity as defined in the Google Cloud Datastore + API. - Operation: An Operation represents work being performed in the background. + Operation: An Operation represents work being performed in the background. - EntityFilter: Allows specifying a subset of entities in a project. This is - specified as a combination of kinds and namespaces (either or both of which - may be all). + EntityFilter: Allows specifying a subset of entities in a project. This is + specified as a combination of kinds and namespaces (either or both of which + may be all). - ----------------------------------------------------------------------------- - ## Services + ----------------------------------------------------------------------------- + ## Services - # Export/Import + # Export/Import - The Export/Import service provides the ability to copy all or a subset of - entities to/from Google Cloud Storage. + The Export/Import service provides the ability to copy all or a subset of + entities to/from Google Cloud Storage. - Exported data may be imported into Cloud Datastore for any Google Cloud - Platform project. It is not restricted to the export source project. It is - possible to export from one project and then import into another. + Exported data may be imported into Cloud Datastore for any Google Cloud + Platform project. It is not restricted to the export source project. It is + possible to export from one project and then import into another. - Exported data can also be loaded into Google BigQuery for analysis. + Exported data can also be loaded into Google BigQuery for analysis. - Exports and imports are performed asynchronously. An Operation resource is - created for each export/import. The state (including any errors encountered) - of the export/import may be queried via the Operation resource. + Exports and imports are performed asynchronously. An Operation resource is + created for each export/import. The state (including any errors encountered) + of the export/import may be queried via the Operation resource. - # Index + # Index - The index service manages Cloud Datastore composite indexes. + The index service manages Cloud Datastore composite indexes. - Index creation and deletion are performed asynchronously. - An Operation resource is created for each such asynchronous operation. - The state of the operation (including any errors encountered) - may be queried via the Operation resource. + Index creation and deletion are performed asynchronously. + An Operation resource is created for each such asynchronous operation. + The state of the operation (including any errors encountered) + may be queried via the Operation resource. - # Operation + # Operation - The Operations collection provides a record of actions performed for the - specified project (including any operations in progress). Operations are not - created directly but through calls on other collections or resources. + The Operations collection provides a record of actions performed for the + specified project (including any operations in progress). Operations are not + created directly but through calls on other collections or resources. - An operation that is not yet done may be cancelled. 
The request to cancel is - asynchronous and the operation may continue to run for some time after the - request to cancel is made. + An operation that is not yet done may be cancelled. The request to cancel is + asynchronous and the operation may continue to run for some time after the + request to cancel is made. - An operation that is done may be deleted so that it is no longer listed as - part of the Operation collection. + An operation that is done may be deleted so that it is no longer listed as + part of the Operation collection. - ListOperations returns all pending operations, but not completed operations. + ListOperations returns all pending operations, but not completed operations. - Operations are created by service DatastoreAdmin, - but are accessed via service google.longrunning.Operations. - """ + Operations are created by service DatastoreAdmin, + but are accessed via service google.longrunning.Operations. + """ def ExportEntities(self, request, context): """Exports a copy of all or a subset of entities from Google Cloud Datastore - to another storage system, such as Google Cloud Storage. Recent updates to - entities may not be reflected in the export. The export occurs in the - background and its progress can be monitored and managed via the - Operation resource that is created. The output of an export may only be - used once the associated operation is done. If an export operation is - cancelled before completion it may leave partial data behind in Google - Cloud Storage. - """ + to another storage system, such as Google Cloud Storage. Recent updates to + entities may not be reflected in the export. The export occurs in the + background and its progress can be monitored and managed via the + Operation resource that is created. The output of an export may only be + used once the associated operation is done. If an export operation is + cancelled before completion it may leave partial data behind in Google + Cloud Storage. + """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") raise NotImplementedError("Method not implemented!") def ImportEntities(self, request, context): """Imports entities into Google Cloud Datastore. Existing entities with the - same key are overwritten. The import occurs in the background and its - progress can be monitored and managed via the Operation resource that is - created. If an ImportEntities operation is cancelled, it is possible - that a subset of the data has already been imported to Cloud Datastore. - """ + same key are overwritten. The import occurs in the background and its + progress can be monitored and managed via the Operation resource that is + created. If an ImportEntities operation is cancelled, it is possible + that a subset of the data has already been imported to Cloud Datastore. + """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") raise NotImplementedError("Method not implemented!") def GetIndex(self, request, context): """Gets an index. - """ + """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") raise NotImplementedError("Method not implemented!") def ListIndexes(self, request, context): """Lists the indexes that match the specified filters. Datastore uses an - eventually consistent query to fetch the list of indexes and may - occasionally return stale results. - """ + eventually consistent query to fetch the list of indexes and may + occasionally return stale results. 
+ """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") raise NotImplementedError("Method not implemented!") @@ -237,3 +238,177 @@ def add_DatastoreAdminServicer_to_server(servicer, server): "google.datastore.admin.v1.DatastoreAdmin", rpc_method_handlers ) server.add_generic_rpc_handlers((generic_handler,)) + + +# This class is part of an EXPERIMENTAL API. +class DatastoreAdmin(object): + """Google Cloud Datastore Admin API + + + The Datastore Admin API provides several admin services for Cloud Datastore. + + ----------------------------------------------------------------------------- + ## Concepts + + Project, namespace, kind, and entity as defined in the Google Cloud Datastore + API. + + Operation: An Operation represents work being performed in the background. + + EntityFilter: Allows specifying a subset of entities in a project. This is + specified as a combination of kinds and namespaces (either or both of which + may be all). + + ----------------------------------------------------------------------------- + ## Services + + # Export/Import + + The Export/Import service provides the ability to copy all or a subset of + entities to/from Google Cloud Storage. + + Exported data may be imported into Cloud Datastore for any Google Cloud + Platform project. It is not restricted to the export source project. It is + possible to export from one project and then import into another. + + Exported data can also be loaded into Google BigQuery for analysis. + + Exports and imports are performed asynchronously. An Operation resource is + created for each export/import. The state (including any errors encountered) + of the export/import may be queried via the Operation resource. + + # Index + + The index service manages Cloud Datastore composite indexes. + + Index creation and deletion are performed asynchronously. + An Operation resource is created for each such asynchronous operation. + The state of the operation (including any errors encountered) + may be queried via the Operation resource. + + # Operation + + The Operations collection provides a record of actions performed for the + specified project (including any operations in progress). Operations are not + created directly but through calls on other collections or resources. + + An operation that is not yet done may be cancelled. The request to cancel is + asynchronous and the operation may continue to run for some time after the + request to cancel is made. + + An operation that is done may be deleted so that it is no longer listed as + part of the Operation collection. + + ListOperations returns all pending operations, but not completed operations. + + Operations are created by service DatastoreAdmin, + but are accessed via service google.longrunning.Operations. 
+ """ + + @staticmethod + def ExportEntities( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): + return grpc.experimental.unary_unary( + request, + target, + "/google.datastore.admin.v1.DatastoreAdmin/ExportEntities", + google_dot_cloud_dot_datastore__admin__v1_dot_proto_dot_datastore__admin__pb2.ExportEntitiesRequest.SerializeToString, + google_dot_longrunning_dot_operations__pb2.Operation.FromString, + options, + channel_credentials, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + ) + + @staticmethod + def ImportEntities( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): + return grpc.experimental.unary_unary( + request, + target, + "/google.datastore.admin.v1.DatastoreAdmin/ImportEntities", + google_dot_cloud_dot_datastore__admin__v1_dot_proto_dot_datastore__admin__pb2.ImportEntitiesRequest.SerializeToString, + google_dot_longrunning_dot_operations__pb2.Operation.FromString, + options, + channel_credentials, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + ) + + @staticmethod + def GetIndex( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): + return grpc.experimental.unary_unary( + request, + target, + "/google.datastore.admin.v1.DatastoreAdmin/GetIndex", + google_dot_cloud_dot_datastore__admin__v1_dot_proto_dot_datastore__admin__pb2.GetIndexRequest.SerializeToString, + google_dot_cloud_dot_datastore__admin__v1_dot_proto_dot_index__pb2.Index.FromString, + options, + channel_credentials, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + ) + + @staticmethod + def ListIndexes( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): + return grpc.experimental.unary_unary( + request, + target, + "/google.datastore.admin.v1.DatastoreAdmin/ListIndexes", + google_dot_cloud_dot_datastore__admin__v1_dot_proto_dot_datastore__admin__pb2.ListIndexesRequest.SerializeToString, + google_dot_cloud_dot_datastore__admin__v1_dot_proto_dot_datastore__admin__pb2.ListIndexesResponse.FromString, + options, + channel_credentials, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + ) diff --git a/google/cloud/datastore_admin_v1/proto/index_pb2.py b/google/cloud/datastore_admin_v1/proto/index_pb2.py index 41f17a3f..c1ccb034 100644 --- a/google/cloud/datastore_admin_v1/proto/index_pb2.py +++ b/google/cloud/datastore_admin_v1/proto/index_pb2.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! # source: google/cloud/datastore_admin_v1/proto/index.proto - +"""Generated protocol buffer code.""" from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message from google.protobuf import reflection as _reflection diff --git a/google/cloud/datastore_admin_v1/proto/index_pb2_grpc.py b/google/cloud/datastore_admin_v1/proto/index_pb2_grpc.py index 07cb78fe..8a939394 100644 --- a/google/cloud/datastore_admin_v1/proto/index_pb2_grpc.py +++ b/google/cloud/datastore_admin_v1/proto/index_pb2_grpc.py @@ -1,2 +1,3 @@ # Generated by the gRPC Python protocol compiler plugin. 
DO NOT EDIT! +"""Client and server classes corresponding to protobuf-defined services.""" import grpc diff --git a/google/cloud/datastore_v1/gapic/datastore_client.py b/google/cloud/datastore_v1/gapic/datastore_client.py index a4e6d79b..5f9b530f 100644 --- a/google/cloud/datastore_v1/gapic/datastore_client.py +++ b/google/cloud/datastore_v1/gapic/datastore_client.py @@ -548,8 +548,8 @@ def commit( self, project_id, mode=None, - transaction=None, mutations=None, + transaction=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, diff --git a/google/cloud/datastore_v1/proto/datastore_pb2.py b/google/cloud/datastore_v1/proto/datastore_pb2.py index 3f5266e2..cf7a3cfd 100644 --- a/google/cloud/datastore_v1/proto/datastore_pb2.py +++ b/google/cloud/datastore_v1/proto/datastore_pb2.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! # source: google/cloud/datastore_v1/proto/datastore.proto - +"""Generated protocol buffer code.""" from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message from google.protobuf import reflection as _reflection diff --git a/google/cloud/datastore_v1/proto/datastore_pb2_grpc.py b/google/cloud/datastore_v1/proto/datastore_pb2_grpc.py index 5de1128a..5ff5d9e1 100644 --- a/google/cloud/datastore_v1/proto/datastore_pb2_grpc.py +++ b/google/cloud/datastore_v1/proto/datastore_pb2_grpc.py @@ -1,4 +1,5 @@ # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +"""Client and server classes corresponding to protobuf-defined services.""" import grpc from google.cloud.datastore_v1.proto import ( @@ -8,20 +9,20 @@ class DatastoreStub(object): """Each RPC normalizes the partition IDs of the keys in its input entities, - and always returns entities with keys with normalized partition IDs. - This applies to all keys and entities, including those in values, except keys - with both an empty path and an empty or unset partition ID. Normalization of - input keys sets the project ID (if not already set) to the project ID from - the request. + and always returns entities with keys with normalized partition IDs. + This applies to all keys and entities, including those in values, except keys + with both an empty path and an empty or unset partition ID. Normalization of + input keys sets the project ID (if not already set) to the project ID from + the request. - """ + """ def __init__(self, channel): """Constructor. - Args: - channel: A grpc.Channel. - """ + Args: + channel: A grpc.Channel. + """ self.Lookup = channel.unary_unary( "/google.datastore.v1.Datastore/Lookup", request_serializer=google_dot_cloud_dot_datastore__v1_dot_proto_dot_datastore__pb2.LookupRequest.SerializeToString, @@ -61,62 +62,62 @@ def __init__(self, channel): class DatastoreServicer(object): """Each RPC normalizes the partition IDs of the keys in its input entities, - and always returns entities with keys with normalized partition IDs. - This applies to all keys and entities, including those in values, except keys - with both an empty path and an empty or unset partition ID. Normalization of - input keys sets the project ID (if not already set) to the project ID from - the request. + and always returns entities with keys with normalized partition IDs. + This applies to all keys and entities, including those in values, except keys + with both an empty path and an empty or unset partition ID. 
Normalization of + input keys sets the project ID (if not already set) to the project ID from + the request. - """ + """ def Lookup(self, request, context): """Looks up entities by key. - """ + """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") raise NotImplementedError("Method not implemented!") def RunQuery(self, request, context): """Queries for entities. - """ + """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") raise NotImplementedError("Method not implemented!") def BeginTransaction(self, request, context): """Begins a new transaction. - """ + """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") raise NotImplementedError("Method not implemented!") def Commit(self, request, context): """Commits a transaction, optionally creating, deleting or modifying some - entities. - """ + entities. + """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") raise NotImplementedError("Method not implemented!") def Rollback(self, request, context): """Rolls back a transaction. - """ + """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") raise NotImplementedError("Method not implemented!") def AllocateIds(self, request, context): """Allocates IDs for the given keys, which is useful for referencing an entity - before it is inserted. - """ + before it is inserted. + """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") raise NotImplementedError("Method not implemented!") def ReserveIds(self, request, context): """Prevents the supplied keys' IDs from being auto-allocated by Cloud - Datastore. - """ + Datastore. + """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") raise NotImplementedError("Method not implemented!") @@ -164,3 +165,204 @@ def add_DatastoreServicer_to_server(servicer, server): "google.datastore.v1.Datastore", rpc_method_handlers ) server.add_generic_rpc_handlers((generic_handler,)) + + +# This class is part of an EXPERIMENTAL API. +class Datastore(object): + """Each RPC normalizes the partition IDs of the keys in its input entities, + and always returns entities with keys with normalized partition IDs. + This applies to all keys and entities, including those in values, except keys + with both an empty path and an empty or unset partition ID. Normalization of + input keys sets the project ID (if not already set) to the project ID from + the request. 
+ + """ + + @staticmethod + def Lookup( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): + return grpc.experimental.unary_unary( + request, + target, + "/google.datastore.v1.Datastore/Lookup", + google_dot_cloud_dot_datastore__v1_dot_proto_dot_datastore__pb2.LookupRequest.SerializeToString, + google_dot_cloud_dot_datastore__v1_dot_proto_dot_datastore__pb2.LookupResponse.FromString, + options, + channel_credentials, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + ) + + @staticmethod + def RunQuery( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): + return grpc.experimental.unary_unary( + request, + target, + "/google.datastore.v1.Datastore/RunQuery", + google_dot_cloud_dot_datastore__v1_dot_proto_dot_datastore__pb2.RunQueryRequest.SerializeToString, + google_dot_cloud_dot_datastore__v1_dot_proto_dot_datastore__pb2.RunQueryResponse.FromString, + options, + channel_credentials, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + ) + + @staticmethod + def BeginTransaction( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): + return grpc.experimental.unary_unary( + request, + target, + "/google.datastore.v1.Datastore/BeginTransaction", + google_dot_cloud_dot_datastore__v1_dot_proto_dot_datastore__pb2.BeginTransactionRequest.SerializeToString, + google_dot_cloud_dot_datastore__v1_dot_proto_dot_datastore__pb2.BeginTransactionResponse.FromString, + options, + channel_credentials, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + ) + + @staticmethod + def Commit( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): + return grpc.experimental.unary_unary( + request, + target, + "/google.datastore.v1.Datastore/Commit", + google_dot_cloud_dot_datastore__v1_dot_proto_dot_datastore__pb2.CommitRequest.SerializeToString, + google_dot_cloud_dot_datastore__v1_dot_proto_dot_datastore__pb2.CommitResponse.FromString, + options, + channel_credentials, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + ) + + @staticmethod + def Rollback( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): + return grpc.experimental.unary_unary( + request, + target, + "/google.datastore.v1.Datastore/Rollback", + google_dot_cloud_dot_datastore__v1_dot_proto_dot_datastore__pb2.RollbackRequest.SerializeToString, + google_dot_cloud_dot_datastore__v1_dot_proto_dot_datastore__pb2.RollbackResponse.FromString, + options, + channel_credentials, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + ) + + @staticmethod + def AllocateIds( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): + return grpc.experimental.unary_unary( + request, + target, + "/google.datastore.v1.Datastore/AllocateIds", + google_dot_cloud_dot_datastore__v1_dot_proto_dot_datastore__pb2.AllocateIdsRequest.SerializeToString, + 
google_dot_cloud_dot_datastore__v1_dot_proto_dot_datastore__pb2.AllocateIdsResponse.FromString, + options, + channel_credentials, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + ) + + @staticmethod + def ReserveIds( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): + return grpc.experimental.unary_unary( + request, + target, + "/google.datastore.v1.Datastore/ReserveIds", + google_dot_cloud_dot_datastore__v1_dot_proto_dot_datastore__pb2.ReserveIdsRequest.SerializeToString, + google_dot_cloud_dot_datastore__v1_dot_proto_dot_datastore__pb2.ReserveIdsResponse.FromString, + options, + channel_credentials, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + ) diff --git a/google/cloud/datastore_v1/proto/entity_pb2.py b/google/cloud/datastore_v1/proto/entity_pb2.py index 4df7bbb6..43321289 100644 --- a/google/cloud/datastore_v1/proto/entity_pb2.py +++ b/google/cloud/datastore_v1/proto/entity_pb2.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! # source: google/cloud/datastore_v1/proto/entity.proto - +"""Generated protocol buffer code.""" from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message from google.protobuf import reflection as _reflection diff --git a/google/cloud/datastore_v1/proto/entity_pb2_grpc.py b/google/cloud/datastore_v1/proto/entity_pb2_grpc.py index 07cb78fe..8a939394 100644 --- a/google/cloud/datastore_v1/proto/entity_pb2_grpc.py +++ b/google/cloud/datastore_v1/proto/entity_pb2_grpc.py @@ -1,2 +1,3 @@ # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +"""Client and server classes corresponding to protobuf-defined services.""" import grpc diff --git a/google/cloud/datastore_v1/proto/query_pb2.py b/google/cloud/datastore_v1/proto/query_pb2.py index cf915c84..e3bd1141 100644 --- a/google/cloud/datastore_v1/proto/query_pb2.py +++ b/google/cloud/datastore_v1/proto/query_pb2.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! # source: google/cloud/datastore_v1/proto/query.proto - +"""Generated protocol buffer code.""" from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message from google.protobuf import reflection as _reflection diff --git a/google/cloud/datastore_v1/proto/query_pb2_grpc.py b/google/cloud/datastore_v1/proto/query_pb2_grpc.py index 07cb78fe..8a939394 100644 --- a/google/cloud/datastore_v1/proto/query_pb2_grpc.py +++ b/google/cloud/datastore_v1/proto/query_pb2_grpc.py @@ -1,2 +1,3 @@ # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! 
+"""Client and server classes corresponding to protobuf-defined services.""" import grpc diff --git a/synth.metadata b/synth.metadata index 1fa18dd7..865b99bc 100644 --- a/synth.metadata +++ b/synth.metadata @@ -3,23 +3,23 @@ { "git": { "name": ".", - "remote": "https://github.com/googleapis/python-datastore.git", - "sha": "83c636efc6e5bd02bd8dc614e4114f9477c74972" + "remote": "git@github.com:googleapis/python-datastore", + "sha": "f822b98873c829d4ae01d3de1b0d58e0076948fd" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "cf41866c6f14f10a07aa1e2a1260fc0a2727d889", - "internalRef": "317812187" + "sha": "5202cfe3e5c2907a1a21a4c6d4bd0812029b6aa3", + "internalRef": "319247865" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "cf2eff09d0f5319a4dc5cdce2b6356d85af4a798" + "sha": "303271797a360f8a439203413f13a160f2f5b3b4" } } ], diff --git a/synth.py b/synth.py index 49e9f694..9264093c 100644 --- a/synth.py +++ b/synth.py @@ -45,10 +45,37 @@ s.move(library / "google/cloud/datastore_admin_v1/proto") s.move(library / "google/cloud/datastore_admin_v1/gapic") +# TODO(busunkim): Remove during the microgenerator transition. +# This re-orders the parameters to avoid breaking existing code. +num = s.replace( +"google/**/datastore_client.py", +"""def commit\( +\s+self, +\s+project_id, +\s+mode=None, +\s+transaction=None, +\s+mutations=None, +\s+retry=google\.api_core\.gapic_v1\.method\.DEFAULT, +\s+timeout=google\.api_core\.gapic_v1\.method\.DEFAULT, +\s+metadata=None\):""", +"""def commit( + self, + project_id, + mode=None, + mutations=None, + transaction=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ):""" +) + +if num != 1: + raise Exception("Required replacement not made.") # ---------------------------------------------------------------------------- # Add templated files # ---------------------------------------------------------------------------- templated_files = common.py_library(unit_cov_level=97, cov_level=99) -s.move(templated_files, excludes=["docs/conf.py"]) +s.move(templated_files, excludes=["docs/conf.py", "docs/multiprocessing.rst"]) s.shell.run(["nox", "-s", "blacken"], hide_output=False) From bd47d40dd985cd460daa2d0e9d641bcf667e40d0 Mon Sep 17 00:00:00 2001 From: Bu Sun Kim Date: Wed, 1 Jul 2020 21:52:28 +0000 Subject: [PATCH 24/24] docs: use older sphinx --- noxfile.py | 2 +- synth.py | 2 ++ 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/noxfile.py b/noxfile.py index 7698c37d..187124ab 100644 --- a/noxfile.py +++ b/noxfile.py @@ -145,7 +145,7 @@ def docs(session): """Build the docs for this library.""" session.install("-e", ".") - session.install("sphinx", "alabaster", "recommonmark") + session.install("sphinx<3.0.0", "alabaster", "recommonmark") shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) session.run( diff --git a/synth.py b/synth.py index 9264093c..36b5150a 100644 --- a/synth.py +++ b/synth.py @@ -78,4 +78,6 @@ templated_files = common.py_library(unit_cov_level=97, cov_level=99) s.move(templated_files, excludes=["docs/conf.py", "docs/multiprocessing.rst"]) +s.replace("noxfile.py", """["']sphinx['"]""", '''"sphinx<3.0.0"''') + s.shell.run(["nox", "-s", "blacken"], hide_output=False)