Commit ce7d573

Merge pull request #1330 from dhermes/replace-dataset-id-with-project

Replace dataset id with project in datastore

2 parents: 481b467 + b1d4895

19 files changed: +711 −719 lines

gcloud/datastore/__init__.py (2 additions & 2 deletions)
@@ -30,9 +30,9 @@
 API.
 
 - :class:`gcloud.datastore.client.Client`
-  which represents a dataset ID (string) and namespace (string) bundled with
+  which represents a project (string) and namespace (string) bundled with
   a connection and has convenience methods for constructing objects with that
-  dataset ID / namespace.
+  project / namespace.
 
 - :class:`gcloud.datastore.entity.Entity`
   which represents a single entity in the datastore
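
For orientation, a hedged usage sketch of the surface this docstring describes: after the rename, a client bundles a project (rather than a dataset ID) and an optional namespace with its connection. The project, namespace, kind, and field values below are placeholders, and the snippet assumes the package-level datastore.Client / datastore.Entity re-exports plus credentials discoverable in the environment.

    from gcloud import datastore

    # Placeholder project / namespace values.
    client = datastore.Client(project='my-project', namespace='my-namespace')

    # Objects built through the client's convenience methods inherit that
    # project / namespace pairing.
    key = client.key('Person', 1234)
    entity = datastore.Entity(key=key)
    entity['name'] = 'Alice'
    client.put(entity)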

gcloud/datastore/batch.py (12 additions & 12 deletions)
@@ -22,7 +22,7 @@
 """
 
 from gcloud.datastore import helpers
-from gcloud.datastore.key import _dataset_ids_equal
+from gcloud.datastore.key import _projects_equal
 from gcloud.datastore._generated import datastore_pb2 as _datastore_pb2
 
 
@@ -73,13 +73,13 @@ def current(self):
         return self._client.current_batch
 
     @property
-    def dataset_id(self):
-        """Getter for dataset ID in which the batch will run.
+    def project(self):
+        """Getter for project in which the batch will run.
 
         :rtype: :class:`str`
-        :returns: The dataset ID in which the batch will run.
+        :returns: The project in which the batch will run.
         """
-        return self._client.dataset_id
+        return self._client.project
 
     @property
     def namespace(self):
@@ -167,13 +167,13 @@ def put(self, entity):
         :param entity: the entity to be saved.
 
         :raises: ValueError if entity has no key assigned, or if the key's
-                 ``dataset_id`` does not match ours.
+                 ``project`` does not match ours.
         """
         if entity.key is None:
             raise ValueError("Entity must have a key")
 
-        if not _dataset_ids_equal(self.dataset_id, entity.key.dataset_id):
-            raise ValueError("Key must be from same dataset as batch")
+        if not _projects_equal(self.project, entity.key.project):
+            raise ValueError("Key must be from same project as batch")
 
         if entity.key.is_partial:
             entity_pb = self._add_partial_key_entity_pb()
@@ -190,13 +190,13 @@ def delete(self, key):
         :param key: the key to be deleted.
 
         :raises: ValueError if key is not complete, or if the key's
-                 ``dataset_id`` does not match ours.
+                 ``project`` does not match ours.
         """
         if key.is_partial:
             raise ValueError("Key must be complete")
 
-        if not _dataset_ids_equal(self.dataset_id, key.dataset_id):
-            raise ValueError("Key must be from same dataset as batch")
+        if not _projects_equal(self.project, key.project):
+            raise ValueError("Key must be from same project as batch")
 
         key_pb = helpers._prepare_key_for_request(key.to_protobuf())
         self._add_delete_key_pb().CopyFrom(key_pb)
@@ -215,7 +215,7 @@ def commit(self):
         context manager.
         """
         _, updated_keys = self.connection.commit(
-            self.dataset_id, self._commit_request, self._id)
+            self.project, self._commit_request, self._id)
         # If the back-end returns without error, we are guaranteed that
         # :meth:`Connection.commit` will return keys that match (length and
         # order) directly ``_partial_key_entities``.
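
A minimal sketch of the guard Batch.put() and Batch.delete() now enforce, as shown above: every key must carry the same project as the batch's client, otherwise ValueError is raised before anything is queued. The project names and the Task kind are placeholders, and the client construction assumes ambient credentials.

    from gcloud import datastore

    client = datastore.Client(project='my-project')
    batch = client.batch()

    # Key minted by the client matches batch.project, so it is accepted.
    task = datastore.Entity(key=client.key('Task', 42))
    task['done'] = False
    batch.put(task)

    # A key built for a different project is rejected.
    foreign = datastore.Entity(key=datastore.Key('Task', 42, project='other-project'))
    try:
        batch.put(foreign)
    except ValueError as exc:
        print(exc)  # "Key must be from same project as batch"

    # batch.commit() would then send the queued mutation using client.project.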

gcloud/datastore/client.py (47 additions & 53 deletions)
@@ -11,7 +11,7 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-"""Convenience wrapper for invoking APIs/factories w/ a dataset ID."""
+"""Convenience wrapper for invoking APIs/factories w/ a project."""
 
 import os
 
@@ -23,8 +23,8 @@
 from gcloud.datastore.connection import Connection
 from gcloud.datastore.batch import Batch
 from gcloud.datastore.entity import Entity
+from gcloud.datastore.key import _projects_equal
 from gcloud.datastore.key import Key
-from gcloud.datastore.key import _dataset_ids_equal
 from gcloud.datastore.query import Query
 from gcloud.datastore.transaction import Transaction
 from gcloud.environment_vars import DATASET
@@ -35,18 +35,18 @@
 """Maximum number of iterations to wait for deferred keys."""
 
 
-def _get_production_dataset_id():
+def _get_production_project():
     """Gets the production application ID if it can be inferred."""
     return os.getenv(DATASET)
 
 
-def _get_gcd_dataset_id():
+def _get_gcd_project():
     """Gets the GCD application ID if it can be inferred."""
     return os.getenv(GCD_DATASET)
 
 
-def _determine_default_dataset_id(dataset_id=None):
-    """Determine default dataset ID explicitly or implicitly as fall-back.
+def _determine_default_project(project=None):
+    """Determine default project explicitly or implicitly as fall-back.
 
     In implicit case, supports four environments. In order of precedence, the
     implicit environments are:
@@ -56,28 +56,28 @@ def _determine_default_dataset_id(dataset_id=None):
     * Google App Engine application ID
     * Google Compute Engine project ID (from metadata server)
 
-    :type dataset_id: string
-    :param dataset_id: Optional. The dataset ID to use as default.
+    :type project: string
+    :param project: Optional. The project to use as default.
 
     :rtype: string or ``NoneType``
-    :returns: Default dataset ID if it can be determined.
+    :returns: Default project if it can be determined.
     """
-    if dataset_id is None:
-        dataset_id = _get_production_dataset_id()
+    if project is None:
+        project = _get_production_project()
 
-    if dataset_id is None:
-        dataset_id = _get_gcd_dataset_id()
+    if project is None:
+        project = _get_gcd_project()
 
-    if dataset_id is None:
-        dataset_id = _app_engine_id()
+    if project is None:
+        project = _app_engine_id()
 
-    if dataset_id is None:
-        dataset_id = _compute_engine_id()
+    if project is None:
+        project = _compute_engine_id()
 
-    return dataset_id
+    return project
 
 
-def _extended_lookup(connection, dataset_id, key_pbs,
+def _extended_lookup(connection, project, key_pbs,
                      missing=None, deferred=None,
                      eventual=False, transaction_id=None):
     """Repeat lookup until all keys found (unless stop requested).
@@ -87,8 +87,8 @@ def _extended_lookup(connection, dataset_id, key_pbs,
     :type connection: :class:`gcloud.datastore.connection.Connection`
     :param connection: The connection used to connect to datastore.
 
-    :type dataset_id: string
-    :param dataset_id: The ID of the dataset of which to make the request.
+    :type project: string
+    :param project: The project to make the request for.
 
     :type key_pbs: list of :class:`gcloud.datastore._generated.entity_pb2.Key`
     :param key_pbs: The keys to retrieve from the datastore.
@@ -130,7 +130,7 @@ def _extended_lookup(connection, dataset_id, key_pbs,
         loop_num += 1
 
         results_found, missing_found, deferred_found = connection.lookup(
-            dataset_id=dataset_id,
+            project=project,
             key_pbs=key_pbs,
             eventual=eventual,
             transaction_id=transaction_id,
@@ -156,10 +156,10 @@ def _extended_lookup(connection, dataset_id, key_pbs,
 
 
 class Client(_BaseClient):
-    """Convenience wrapper for invoking APIs/factories w/ a dataset ID.
+    """Convenience wrapper for invoking APIs/factories w/ a project.
 
-    :type dataset_id: string
-    :param dataset_id: (optional) dataset ID to pass to proxied API methods.
+    :type project: string
+    :param project: (optional) The project to pass to proxied API methods.
 
     :type namespace: string
     :param namespace: (optional) namespace to pass to proxied API methods.
@@ -178,12 +178,12 @@ class Client(_BaseClient):
     """
     _connection_class = Connection
 
-    def __init__(self, dataset_id=None, namespace=None,
+    def __init__(self, project=None, namespace=None,
                  credentials=None, http=None):
-        dataset_id = _determine_default_dataset_id(dataset_id)
-        if dataset_id is None:
-            raise EnvironmentError('Dataset ID could not be inferred.')
-        self.dataset_id = dataset_id
+        project = _determine_default_project(project)
+        if project is None:
+            raise EnvironmentError('Project could not be inferred.')
+        self.project = project
         self.namespace = namespace
         self._batch_stack = _LocalStack()
         super(Client, self).__init__(credentials, http)
@@ -281,22 +281,22 @@ def get_multi(self, keys, missing=None, deferred=None):
 
         :rtype: list of :class:`gcloud.datastore.entity.Entity`
         :returns: The requested entities.
-        :raises: :class:`ValueError` if one or more of ``keys`` has a dataset
-                 ID which does not match our dataset ID.
+        :raises: :class:`ValueError` if one or more of ``keys`` has a project
+                 which does not match our project.
         """
         if not keys:
             return []
 
-        ids = set(key.dataset_id for key in keys)
+        ids = set(key.project for key in keys)
         for current_id in ids:
-            if not _dataset_ids_equal(current_id, self.dataset_id):
-                raise ValueError('Keys do not match dataset ID')
+            if not _projects_equal(current_id, self.project):
+                raise ValueError('Keys do not match project')
 
         transaction = self.current_transaction
 
         entity_pbs = _extended_lookup(
             connection=self.connection,
-            dataset_id=self.dataset_id,
+            project=self.project,
             key_pbs=[k.to_protobuf() for k in keys],
             missing=missing,
             deferred=deferred,
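
A short, hedged sketch of what the renamed get_multi() path above means for callers: keys minted for a different project are rejected up front, while the optional `missing` and `deferred` lists are populated by the repeated lookup. All names are placeholders and the client again assumes ambient credentials.

    from gcloud import datastore

    client = datastore.Client(project='my-project')
    keys = [client.key('Task', 1), client.key('Task', 2)]

    missing = []
    entities = client.get_multi(keys, missing=missing)
    # `entities` holds whatever was found; `missing` collects placeholders for
    # keys that had no stored entity.

    try:
        client.get_multi([datastore.Key('Task', 1, project='other-project')])
    except ValueError:
        pass  # "Keys do not match project"
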
@@ -414,7 +414,7 @@ def allocate_ids(self, incomplete_key, num_ids):
         incomplete_key_pbs = [incomplete_key_pb] * num_ids
 
         conn = self.connection
-        allocated_key_pbs = conn.allocate_ids(incomplete_key.dataset_id,
+        allocated_key_pbs = conn.allocate_ids(incomplete_key.project,
                                               incomplete_key_pbs)
         allocated_ids = [allocated_key_pb.path_element[-1].id
                          for allocated_key_pb in allocated_key_pbs]
@@ -424,39 +424,33 @@ def allocate_ids(self, incomplete_key, num_ids):
     def key(self, *path_args, **kwargs):
         """Proxy to :class:`gcloud.datastore.key.Key`.
 
-        Passes our ``dataset_id``.
+        Passes our ``project``.
         """
-        if 'dataset_id' in kwargs:
-            raise TypeError('Cannot pass dataset_id')
-        kwargs['dataset_id'] = self.dataset_id
+        if 'project' in kwargs:
+            raise TypeError('Cannot pass project')
+        kwargs['project'] = self.project
         if 'namespace' not in kwargs:
             kwargs['namespace'] = self.namespace
         return Key(*path_args, **kwargs)
 
     def batch(self):
-        """Proxy to :class:`gcloud.datastore.batch.Batch`.
-
-        Passes our ``dataset_id``.
-        """
+        """Proxy to :class:`gcloud.datastore.batch.Batch`."""
         return Batch(self)
 
     def transaction(self):
-        """Proxy to :class:`gcloud.datastore.transaction.Transaction`.
-
-        Passes our ``dataset_id``.
-        """
+        """Proxy to :class:`gcloud.datastore.transaction.Transaction`."""
         return Transaction(self)
 
     def query(self, **kwargs):
         """Proxy to :class:`gcloud.datastore.query.Query`.
 
-        Passes our ``dataset_id``.
+        Passes our ``project``.
         """
         if 'client' in kwargs:
             raise TypeError('Cannot pass client')
-        if 'dataset_id' in kwargs:
-            raise TypeError('Cannot pass dataset_id')
-        kwargs['dataset_id'] = self.dataset_id
+        if 'project' in kwargs:
+            raise TypeError('Cannot pass project')
+        kwargs['project'] = self.project
         if 'namespace' not in kwargs:
             kwargs['namespace'] = self.namespace
         return Query(self, **kwargs)