Skip to content

Navigation Menu

Sign in
Appearance settings

Search code, repositories, users, issues, pull requests...

Provide feedback

We read every piece of feedback, and take your input very seriously.

Saved searches

Use saved searches to filter your results more quickly

Appearance settings

Commit 7e066d1

Browse files
shollyman and leahecole
authored
fix: address issues with concurrent BigQuery tests (GoogleCloudPlatform#3426)
Co-authored-by: Leah E. Cole <6719667+leahecole@users.noreply.github.com>
1 parent fea7ae2 commit 7e066d1
Copy full SHA for 7e066d1

File tree

Expand file tree / Collapse file tree

6 files changed

+88
-57
lines changed
Filter options
Expand file tree / Collapse file tree

6 files changed

+88
-57
lines changed

‎bigquery/cloud-client/authorized_view_tutorial.py

Copy file name to clipboard · Expand all lines: bigquery/cloud-client/authorized_view_tutorial.py
+18-1Lines changed: 18 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -15,7 +15,7 @@
1515
# limitations under the License.
1616

1717

18-
def run_authorized_view_tutorial():
18+
def run_authorized_view_tutorial(override_values={}):
1919
# Note to user: This is a group email for testing purposes. Replace with
2020
# your own group email address when running this code.
2121
analyst_group_email = 'example-analyst-group@google.com'
@@ -28,6 +28,14 @@ def run_authorized_view_tutorial():
2828
client = bigquery.Client()
2929
source_dataset_id = 'github_source_data'
3030

31+
# [END bigquery_authorized_view_tutorial]
32+
# [END bigquery_avt_create_source_dataset]
33+
# To facilitate testing, we replace values with alternatives
34+
# provided by the testing harness.
35+
source_dataset_id = override_values.get("source_dataset_id", source_dataset_id)
36+
# [START bigquery_authorized_view_tutorial]
37+
# [START bigquery_avt_create_source_dataset]
38+
3139
source_dataset = bigquery.Dataset(client.dataset(source_dataset_id))
3240
# Specify the geographic location where the dataset should reside.
3341
source_dataset.location = 'US'
@@ -57,6 +65,15 @@ def run_authorized_view_tutorial():
5765
# Create a separate dataset to store your view
5866
# [START bigquery_avt_create_shared_dataset]
5967
shared_dataset_id = 'shared_views'
68+
69+
# [END bigquery_authorized_view_tutorial]
70+
# [END bigquery_avt_create_shared_dataset]
71+
# To facilitate testing, we replace values with alternatives
72+
# provided by the testing harness.
73+
shared_dataset_id = override_values.get("shared_dataset_id", shared_dataset_id)
74+
# [START bigquery_authorized_view_tutorial]
75+
# [START bigquery_avt_create_shared_dataset]
76+
6077
shared_dataset = bigquery.Dataset(client.dataset(shared_dataset_id))
6178
shared_dataset.location = 'US'
6279
shared_dataset = client.create_dataset(shared_dataset) # API request

‎bigquery/cloud-client/authorized_view_tutorial_test.py

Copy file name to clipboardExpand all lines: bigquery/cloud-client/authorized_view_tutorial_test.py
+15-13Lines changed: 15 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -12,6 +12,8 @@
1212
# See the License for the specific language governing permissions and
1313
# limitations under the License.
1414

15+
import uuid
16+
1517
from google.cloud import bigquery
1618
import pytest
1719

@@ -24,24 +26,24 @@ def client():
2426

2527

2628
@pytest.fixture
27-
def to_delete(client):
29+
def datasets_to_delete(client):
2830
doomed = []
2931
yield doomed
3032
for item in doomed:
31-
if isinstance(item, (bigquery.Dataset, bigquery.DatasetReference)):
32-
client.delete_dataset(item, delete_contents=True)
33-
elif isinstance(item, (bigquery.Table, bigquery.TableReference)):
34-
client.delete_table(item)
35-
else:
36-
item.delete()
33+
client.delete_dataset(item, delete_contents=True)
3734

3835

39-
def test_authorized_view_tutorial(client, to_delete):
40-
source_dataset_ref = client.dataset('github_source_data')
41-
shared_dataset_ref = client.dataset('shared_views')
42-
to_delete.extend([source_dataset_ref, shared_dataset_ref])
36+
def test_authorized_view_tutorial(client, datasets_to_delete):
37+
override_values = {
38+
"source_dataset_id": "github_source_data_{}".format(str(uuid.uuid4()).replace("-", "_")),
39+
"shared_dataset_id": "shared_views_{}".format(str(uuid.uuid4()).replace("-", "_")),
40+
}
41+
source_dataset_ref = client.dataset(override_values["source_dataset_id"])
42+
shared_dataset_ref = client.dataset(override_values["shared_dataset_id"])
43+
datasets_to_delete.extend([override_values["source_dataset_id"],
44+
override_values["shared_dataset_id"]])
4345

44-
authorized_view_tutorial.run_authorized_view_tutorial()
46+
authorized_view_tutorial.run_authorized_view_tutorial(override_values)
4547

4648
source_dataset = client.get_dataset(source_dataset_ref)
4749
shared_dataset = client.get_dataset(shared_dataset_ref)
@@ -55,7 +57,7 @@ def test_authorized_view_tutorial(client, to_delete):
5557
if entry.entity_type == 'view']
5658
expected_view_ref = {
5759
'projectId': client.project,
58-
'datasetId': 'shared_views',
60+
'datasetId': override_values["shared_dataset_id"],
5961
'tableId': 'github_analyst_view',
6062
}
6163
assert len(authorized_view_entries) == 1

‎bigquery/cloud-client/natality_tutorial.py

Copy file name to clipboardExpand all lines: bigquery/cloud-client/natality_tutorial.py
+9-3Lines changed: 9 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -15,7 +15,7 @@
1515
# limitations under the License.
1616

1717

18-
def run_natality_tutorial():
18+
def run_natality_tutorial(override_values={}):
1919
# [START bigquery_query_natality_tutorial]
2020
"""Create a Google BigQuery linear regression input table.
2121
@@ -37,8 +37,14 @@ def run_natality_tutorial():
3737
client = bigquery.Client()
3838

3939
# Prepare a reference to a new dataset for storing the query results.
40-
dataset_ref = client.dataset('natality_regression')
41-
dataset = bigquery.Dataset(dataset_ref)
40+
dataset_id = 'natality_regression'
41+
# [END bigquery_query_natality_tutorial]
42+
# To facilitate testing, we replace values with alternatives
43+
# provided by the testing harness.
44+
dataset_id = override_values.get("dataset_id", dataset_id)
45+
# [START bigquery_query_natality_tutorial]
46+
47+
dataset = bigquery.Dataset(client.dataset(dataset_id))
4248

4349
# Create the new BigQuery dataset.
4450
dataset = client.create_dataset(dataset)

‎bigquery/cloud-client/natality_tutorial_test.py

Copy file name to clipboardExpand all lines: bigquery/cloud-client/natality_tutorial_test.py
+21-17Lines changed: 21 additions & 17 deletions
Original file line numberDiff line numberDiff line change
@@ -12,31 +12,35 @@
1212
# See the License for the specific language governing permissions and
1313
# limitations under the License.
1414

15+
import uuid
16+
1517
from google.cloud import bigquery
16-
from google.cloud import exceptions
18+
import pytest
1719

1820
import natality_tutorial
1921

2022

21-
def dataset_exists(dataset, client):
22-
try:
23-
client.get_dataset(dataset)
24-
return True
25-
except exceptions.NotFound:
26-
return False
23+
@pytest.fixture(scope='module')
24+
def client():
25+
return bigquery.Client()
2726

2827

29-
def test_natality_tutorial():
30-
client = bigquery.Client()
31-
dataset_ref = client.dataset('natality_regression')
32-
assert not dataset_exists(dataset_ref, client)
28+
@pytest.fixture
29+
def datasets_to_delete(client):
30+
doomed = []
31+
yield doomed
32+
for item in doomed:
33+
client.delete_dataset(item, delete_contents=True)
3334

34-
natality_tutorial.run_natality_tutorial()
3535

36-
assert dataset_exists(dataset_ref, client)
36+
def test_natality_tutorial(client, datasets_to_delete):
37+
override_values = {
38+
"dataset_id": "natality_regression_{}".format(str(uuid.uuid4()).replace("-", "_")),
39+
}
40+
datasets_to_delete.append(override_values["dataset_id"])
3741

38-
table = client.get_table(
39-
bigquery.Table(dataset_ref.table('regression_input')))
40-
assert table.num_rows > 0
42+
natality_tutorial.run_natality_tutorial(override_values)
4143

42-
client.delete_dataset(dataset_ref, delete_contents=True)
44+
table_ref = bigquery.Dataset(client.dataset(override_values["dataset_id"])).table("regression_input")
45+
table = client.get_table(table_ref)
46+
assert table.num_rows > 0

‎bigquery/cloud-client/quickstart.py

Copy file name to clipboardExpand all lines: bigquery/cloud-client/quickstart.py
+7-1Lines changed: 7 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -15,7 +15,7 @@
1515
# limitations under the License.
1616

1717

18-
def run_quickstart():
18+
def run_quickstart(override_values={}):
1919
# [START bigquery_quickstart]
2020
# Imports the Google Cloud client library
2121
from google.cloud import bigquery
@@ -26,6 +26,12 @@ def run_quickstart():
2626
# The name for the new dataset
2727
dataset_id = 'my_new_dataset'
2828

29+
# [END bigquery_quickstart]
30+
# To facilitate testing, we replace values with alternatives
31+
# provided by the testing harness.
32+
dataset_id = override_values.get("dataset_id", dataset_id)
33+
# [START bigquery_quickstart]
34+
2935
# Prepares a reference to the new dataset
3036
dataset_ref = bigquery_client.dataset(dataset_id)
3137
dataset = bigquery.Dataset(dataset_ref)

‎bigquery/cloud-client/quickstart_test.py

Copy file name to clipboardExpand all lines: bigquery/cloud-client/quickstart_test.py
+18-22Lines changed: 18 additions & 22 deletions
Original file line numberDiff line numberDiff line change
@@ -12,8 +12,9 @@
1212
# See the License for the specific language governing permissions and
1313
# limitations under the License.
1414

15+
import uuid
16+
1517
from google.cloud import bigquery
16-
from google.cloud.exceptions import NotFound
1718
import pytest
1819

1920
import quickstart
@@ -24,31 +25,26 @@
2425
DATASET_ID = 'my_new_dataset'
2526

2627

27-
@pytest.fixture
28-
def temporary_dataset():
29-
"""Fixture that ensures the test dataset does not exist before or
30-
after a test."""
31-
bigquery_client = bigquery.Client()
32-
dataset_ref = bigquery_client.dataset(DATASET_ID)
33-
34-
if dataset_exists(dataset_ref, bigquery_client):
35-
bigquery_client.delete_dataset(dataset_ref)
28+
@pytest.fixture(scope='module')
29+
def client():
30+
return bigquery.Client()
3631

37-
yield
3832

39-
if dataset_exists(dataset_ref, bigquery_client):
40-
bigquery_client.delete_dataset(dataset_ref)
33+
@pytest.fixture
34+
def datasets_to_delete(client):
35+
doomed = []
36+
yield doomed
37+
for item in doomed:
38+
client.delete_dataset(item, delete_contents=True)
4139

4240

43-
def dataset_exists(dataset, client):
44-
try:
45-
client.get_dataset(dataset)
46-
return True
47-
except NotFound:
48-
return False
41+
def test_quickstart(capsys, client, datasets_to_delete):
4942

43+
override_values = {
44+
"dataset_id": "my_new_dataset_{}".format(str(uuid.uuid4()).replace("-", "_")),
45+
}
46+
datasets_to_delete.append(override_values["dataset_id"])
5047

51-
def test_quickstart(capsys, temporary_dataset):
52-
quickstart.run_quickstart()
48+
quickstart.run_quickstart(override_values)
5349
out, _ = capsys.readouterr()
54-
assert DATASET_ID in out
50+
assert override_values["dataset_id"] in out

0 commit comments

Comments
0 (0)
Morty Proxy This is a proxified and sanitized view of the page, visit original site.