Skip to content

Navigation Menu

Sign in
Appearance settings

Search code, repositories, users, issues, pull requests...

Provide feedback

We read every piece of feedback, and take your input very seriously.

Saved searches

Use saved searches to filter your results more quickly

Appearance settings

Commit 362537a

Browse files
tswast (Jon Wayne Parrott) authored and committed
BigQuery : add sample for writing query results to a destination table. (GoogleCloudPlatform#1101)
See: https://cloud.google.com/bigquery/docs/writing-results
1 parent 058296f commit 362537a
Copy full SHA for 362537a

File tree

Expand file treeCollapse file tree

2 files changed

+53
-0
lines changed
Filter options
Expand file treeCollapse file tree

2 files changed

+53
-0
lines changed

‎bigquery/cloud-client/query.py

Copy file name to clipboardExpand all lines: bigquery/cloud-client/query.py
+33 lines changed: 33 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -63,6 +63,30 @@ def query_standard_sql(query):
6363
print(row)
6464

6565

66+
def query_destination_table(query, dest_dataset_id, dest_table_id):
    """Run *query* and write its results to a destination table.

    Results larger than the normal response-size limit are permitted, and
    any existing contents of the destination table are overwritten
    (WRITE_TRUNCATE). Once the job completes, every row of the destination
    table is printed.
    """
    bq_client = bigquery.Client()
    job = bq_client.run_async_query(str(uuid.uuid4()), query)

    # Allow for query results larger than the maximum response size.
    job.allow_large_results = True

    # When large results are allowed, a destination table must be set.
    table_ref = bq_client.dataset(dest_dataset_id).table(dest_table_id)
    job.destination = table_ref

    # Allow the results table to be overwritten.
    job.write_disposition = 'WRITE_TRUNCATE'

    job.begin()
    job.result()  # Block until the query job completes.

    # Verify that the results were written to the destination table.
    table_ref.reload()  # Fetch table metadata, such as the schema.
    for row in table_ref.fetch_data():
        print(row)
6690
if __name__ == '__main__':
6791
parser = argparse.ArgumentParser(
6892
description=__doc__,
@@ -72,10 +96,19 @@ def query_standard_sql(query):
7296
'--use_standard_sql',
7397
action='store_true',
7498
help='Use standard SQL syntax.')
99+
parser.add_argument(
100+
'--destination_table',
101+
type=str,
102+
help=(
103+
'Destination table to use for results. '
104+
'Example: my_dataset.my_table'))
75105

76106
args = parser.parse_args()
77107

78108
if args.use_standard_sql:
79109
query_standard_sql(args.query)
110+
elif args.destination_table:
111+
dataset, table = args.destination_table.split('.')
112+
query_destination_table(args.query, dataset, table)
80113
else:
81114
query(args.query)

‎bigquery/cloud-client/query_test.py

Copy file name to clipboardExpand all lines: bigquery/cloud-client/query_test.py
+20 lines changed: 20 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -15,6 +15,10 @@
1515
import query
1616

1717

# Destination dataset and table used by test_query_destination_table.
# NOTE(review): these are assumed to exist in the test project — confirm
# against the test environment's setup.
DATASET_ID = 'test_dataset'
TABLE_ID = 'test_destination_table'
1822
def test_query(capsys):
1923
# Query only outputs the first 10 rows, sort results to avoid randomness
2024
query_string = '''#standardSQL
@@ -44,3 +48,19 @@ def test_query_standard_sql(capsys):
4448
out, _ = capsys.readouterr()
4549

4650
assert 'antonyandcleopatra' in out
51+
52+
53+
def test_query_destination_table(capsys):
    # Query only outputs the first 10 rows, sort results to avoid randomness
    sql = '''#standardSQL
    SELECT corpus
    FROM `publicdata.samples.shakespeare`
    GROUP BY corpus
    ORDER BY corpus
    LIMIT 10;'''

    query.query_destination_table(sql, DATASET_ID, TABLE_ID)

    captured, _ = capsys.readouterr()

    assert 'antonyandcleopatra' in captured

0 commit comments

Comments
0 (0)
Morty Proxy This is a proxified and sanitized view of the page, visit original site.