Commit d25c944

fix(datastore): allow running schedule-export from cloud functions console (GoogleCloudPlatform#5731)
* fix(datastore): allow running schedule-export from cloud functions console
1 parent fd8157a commit d25c944

File tree

1 file changed: +20 -18 lines changed

datastore/schedule-export/main.py
+20 -18 (20 additions & 18 deletions)

@@ -1,10 +1,10 @@
-# Copyright 2021 Google LLC
+# Copyright 2021 Google LLC All Rights Reserved.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
 # You may obtain a copy of the License at
 #
-#     http://www.apache.org/licenses/LICENSE-2.0
+#     http://www.apache.org/licenses/LICENSE-2.0
 #
 # Unless required by applicable law or agreed to in writing, software
 # distributed under the License is distributed on an "AS IS" BASIS,
@@ -32,12 +32,12 @@ def set(self, url, content):

 # The default cache (file_cache) is unavailable when using oauth2client >= 4.0.0 or google-auth,
 # and it will log worrisome messages unless given another interface to use.
-datastore = build('datastore', 'v1', cache=MemoryCache())
-project_id = os.environ.get('GCP_PROJECT')
+datastore = build("datastore", "v1", cache=MemoryCache())
+project_id = os.environ.get("GCP_PROJECT")


 def datastore_export(event, context):
-    '''Triggers a Datastore export from a Cloud Scheduler job.
+    """Triggers a Datastore export from a Cloud Scheduler job.

     Args:
       event (dict): event[data] must contain a json object encoded in
@@ -46,26 +46,28 @@ def datastore_export(event, context):
           and 'namespaceIds' values.
       context (google.cloud.functions.Context): The Cloud Functions event
           metadata.
-    '''
+    """

-    json_data = json.loads(base64.b64decode(event['data']).decode('utf-8'))
-    bucket = json_data['bucket']
+    if "data" in event:
+        # Triggered via Cloud Scheduler, decode the inner data field of the json payload.
+        json_data = json.loads(base64.b64decode(event["data"]).decode("utf-8"))
+    else:
+        # Otherwise, for instance if triggered via the Cloud Console on a Cloud Function, the event is the data.
+        json_data = event
+
+    bucket = json_data["bucket"]
     entity_filter = {}

-    if 'kinds' in json_data:
-        entity_filter['kinds'] = json_data['kinds']
+    if "kinds" in json_data:
+        entity_filter["kinds"] = json_data["kinds"]

-    if 'namespaceIds' in json_data:
-        entity_filter['namespaceIds'] = json_data['namespaceIds']
+    if "namespaceIds" in json_data:
+        entity_filter["namespaceIds"] = json_data["namespaceIds"]

-    request_body = {
-        'outputUrlPrefix': bucket,
-        'entityFilter': entity_filter
-    }
+    request_body = {"outputUrlPrefix": bucket, "entityFilter": entity_filter}

     export_request = datastore.projects().export(
-        projectId=project_id,
-        body=request_body
+        projectId=project_id, body=request_body
     )
     response = export_request.execute()
     print(response)
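
For reference, a minimal, self-contained sketch (not part of the commit; the bucket name, the "Task" kind, and the normalize_event helper are made up for illustration) of why the new guard on "data" matters: a Cloud Scheduler trigger delivers the JSON payload base64-encoded inside event["data"], while a test run from the Cloud Functions console can pass the JSON object as the event itself. Both shapes should reduce to the same payload.

import base64
import json


def normalize_event(event):
    # Illustrative mirror of the branch added in this commit.
    if "data" in event:
        # Cloud Scheduler / Pub/Sub trigger: payload is base64-encoded JSON.
        return json.loads(base64.b64decode(event["data"]).decode("utf-8"))
    # Cloud Functions console test: the event already is the JSON payload.
    return event


payload = {"bucket": "gs://my-example-bucket", "kinds": ["Task"]}

# Shape 1: what Cloud Scheduler delivers via Pub/Sub.
scheduler_event = {
    "data": base64.b64encode(json.dumps(payload).encode("utf-8")).decode("utf-8")
}

# Shape 2: what a console test run can pass directly.
console_event = dict(payload)

assert normalize_event(scheduler_event) == payload
assert normalize_event(console_event) == payload

Before this change the handler assumed shape 1 only, so a console-style invocation without a "data" field failed with a KeyError.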

0 commit comments
