Commit 79b2639

BigQuery: Adds authorized view tutorial (GoogleCloudPlatform#1535)
1 parent bc73929 commit 79b2639
3 files changed: +167 -1 lines changed
Lines changed: 108 additions & 0 deletions
@@ -0,0 +1,108 @@
#!/usr/bin/env python

# Copyright 2018 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.


def run_authorized_view_tutorial():
    # Note to user: This is a group email for testing purposes. Replace with
    # your own group email address when running this code.
    analyst_group_email = 'example-analyst-group@google.com'

    # [START bigquery_authorized_view_tutorial]
    # Create a source dataset
    # [START bigquery_avt_create_source_dataset]
    from google.cloud import bigquery

    client = bigquery.Client()
    source_dataset_id = 'github_source_data'

    source_dataset = bigquery.Dataset(client.dataset(source_dataset_id))
    # Specify the geographic location where the dataset should reside.
    source_dataset.location = 'US'
    source_dataset = client.create_dataset(source_dataset)  # API request
    # [END bigquery_avt_create_source_dataset]

    # Populate a source table
    # [START bigquery_avt_create_source_table]
    source_table_id = 'github_contributors'
    job_config = bigquery.QueryJobConfig()
    job_config.destination = source_dataset.table(source_table_id)
    sql = """
        SELECT commit, author, committer, repo_name
        FROM `bigquery-public-data.github_repos.commits`
        LIMIT 1000
    """
    query_job = client.query(
        sql,
        # Location must match that of the dataset(s) referenced in the query
        # and of the destination table.
        location='US',
        job_config=job_config)  # API request - starts the query

    query_job.result()  # Waits for the query to finish
    # [END bigquery_avt_create_source_table]

    # Create a separate dataset to store your view
    # [START bigquery_avt_create_shared_dataset]
    shared_dataset_id = 'shared_views'
    shared_dataset = bigquery.Dataset(client.dataset(shared_dataset_id))
    shared_dataset.location = 'US'
    shared_dataset = client.create_dataset(shared_dataset)  # API request
    # [END bigquery_avt_create_shared_dataset]

    # Create the view in the new dataset
    # [START bigquery_avt_create_view]
    shared_view_id = 'github_analyst_view'
    view = bigquery.Table(shared_dataset.table(shared_view_id))
    sql_template = """
        SELECT
            commit, author.name as author,
            committer.name as committer, repo_name
        FROM
            `{}.{}.{}`
    """
    view.view_query = sql_template.format(
        client.project, source_dataset_id, source_table_id)
    view = client.create_table(view)  # API request
    # [END bigquery_avt_create_view]

    # Assign access controls to the dataset containing the view
    # [START bigquery_avt_shared_dataset_access]
    # analyst_group_email = 'data_analysts@example.com'
    access_entries = shared_dataset.access_entries
    access_entries.append(
        bigquery.AccessEntry('READER', 'groupByEmail', analyst_group_email)
    )
    shared_dataset.access_entries = access_entries
    shared_dataset = client.update_dataset(
        shared_dataset, ['access_entries'])  # API request
    # [END bigquery_avt_shared_dataset_access]

    # Authorize the view to access the source dataset
    # [START bigquery_avt_source_dataset_access]
    access_entries = source_dataset.access_entries
    access_entries.append(
        bigquery.AccessEntry(None, 'view', view.reference.to_api_repr())
    )
    source_dataset.access_entries = access_entries
    source_dataset = client.update_dataset(
        source_dataset, ['access_entries'])  # API request
    # [END bigquery_avt_source_dataset_access]
    # [END bigquery_authorized_view_tutorial]
    return (source_dataset, shared_dataset)


if __name__ == '__main__':
    run_authorized_view_tutorial()
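
After the tutorial runs, the analyst group has READER access to the shared_views dataset and the view is authorized against the source dataset. As a rough sketch of the follow-up step (not shown in this commit), a member of the analyst group could then query the view directly; 'your-project-id' below is a placeholder for the project in which the tutorial was run:

    from google.cloud import bigquery

    # Client authenticated as a member of the analyst group.
    client = bigquery.Client()

    sql = """
        SELECT author, committer, repo_name
        FROM `your-project-id.shared_views.github_analyst_view`
        LIMIT 10
    """
    # Location must match that of the shared dataset ('US' above).
    query_job = client.query(sql, location='US')  # API request
    for row in query_job.result():
        print(row.author, row.committer, row.repo_name)

The group member only needs access to the shared_views dataset; the authorized view reads the source dataset on their behalf.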
Lines changed: 58 additions & 0 deletions
@@ -0,0 +1,58 @@
# Copyright 2018 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from google.cloud import bigquery
import pytest

import authorized_view_tutorial


@pytest.fixture(scope='module')
def client():
    return bigquery.Client()


@pytest.fixture
def to_delete(client):
    doomed = []
    yield doomed
    for item in doomed:
        if isinstance(item, (bigquery.Dataset, bigquery.DatasetReference)):
            client.delete_dataset(item, delete_contents=True)
        elif isinstance(item, (bigquery.Table, bigquery.TableReference)):
            client.delete_table(item)
        else:
            item.delete()


def test_authorized_view_tutorial(client, to_delete):
    source_dataset, shared_dataset = (
        authorized_view_tutorial.run_authorized_view_tutorial())
    to_delete.extend([source_dataset, shared_dataset])

    analyst_email = 'example-analyst-group@google.com'
    analyst_entries = [entry for entry in shared_dataset.access_entries
                       if entry.entity_id == analyst_email]
    assert len(analyst_entries) == 1
    assert analyst_entries[0].role == 'READER'

    authorized_view_entries = [entry for entry in source_dataset.access_entries
                               if entry.entity_type == 'view']
    expected_view_ref = {
        'projectId': client.project,
        'datasetId': 'shared_views',
        'tableId': 'github_analyst_view',
    }
    assert len(authorized_view_entries) == 1
    assert authorized_view_entries[0].entity_id == expected_view_ref
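
The test checks the access entries on the dataset objects returned by run_authorized_view_tutorial(). As a rough standalone sketch (assuming the tutorial has already been run in the current project), the same entries can also be re-fetched from the API and inspected directly:

    from google.cloud import bigquery

    client = bigquery.Client()

    # Fetch current metadata for both datasets created by the tutorial.
    shared_dataset = client.get_dataset(client.dataset('shared_views'))  # API request
    source_dataset = client.get_dataset(client.dataset('github_source_data'))  # API request

    for dataset in (shared_dataset, source_dataset):
        print(dataset.dataset_id)
        for entry in dataset.access_entries:
            print('  ', entry.role, entry.entity_type, entry.entity_id)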
Lines changed: 1 addition & 1 deletion
@@ -1,3 +1,3 @@
-google-cloud-bigquery==0.31.0
+google-cloud-bigquery==1.3.0
 google-auth-oauthlib==0.2.0
 pytz==2018.3
