From 3593c7c0610b632af200b278bf6f96e3bc913a0e Mon Sep 17 00:00:00 2001
From: Tim Swast
Date: Fri, 26 Jan 2024 17:12:07 +0000
Subject: [PATCH 1/9] chore: ensure colab sample notebooks are tested

---
 noxfile.py                               | 53 ++++++++++---------
 scripts/notebooks_fill_params.py         | 65 ++++++++++++++++++++++++
 scripts/notebooks_restore_from_backup.py | 32 ++++++++++++
 3 files changed, 123 insertions(+), 27 deletions(-)
 create mode 100644 scripts/notebooks_fill_params.py
 create mode 100644 scripts/notebooks_restore_from_backup.py

diff --git a/noxfile.py b/noxfile.py
index 873b60ce91..3e4f494d81 100644
--- a/noxfile.py
+++ b/noxfile.py
@@ -653,7 +653,13 @@ def system_prerelease(session: nox.sessions.Session):
 
 
 @nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS)
-def notebook(session):
+def notebook(session: nox.Session):
+    GOOGLE_CLOUD_PROJECT = os.getenv("GOOGLE_CLOUD_PROJECT")
+    if not GOOGLE_CLOUD_PROJECT:
+        session.error(
+            "Set the GOOGLE_CLOUD_PROJECT environment variable to run the notebook session."
+        )
+
     session.install("-e", ".[all]")
     session.install("pytest", "pytest-xdist", "pytest-retry", "nbmake")
 
@@ -662,22 +668,6 @@ def notebook(session: nox.Session):
     denylist = [
         # Regionalized testing is manually added later.
         "notebooks/location/regionalized.ipynb",
-        # These notebooks contain special colab `param {type:"string"}`
-        # comments, which make it easy for customers to fill in their
-        # own information.
-        # TODO(ashleyxu): Test these notebooks by replacing parameters with
-        # appropriate values and omitting cleanup logic that may break
-        # our test infrastructure.
-        "notebooks/getting_started/getting_started_bq_dataframes.ipynb",
-        "notebooks/getting_started/ml_fundamentals_bq_dataframes.ipynb",
-        "notebooks/generative_ai/bq_dataframes_llm_code_generation.ipynb",
-        "notebooks/generative_ai/bq_dataframes_llm_kmeans.ipynb",
-        "notebooks/regression/bq_dataframes_ml_linear_regression.ipynb",
-        "notebooks/generative_ai/bq_dataframes_ml_drug_name_generation.ipynb",
-        "notebooks/vertex_sdk/sdk2_bigframes_pytorch.ipynb",
-        "notebooks/vertex_sdk/sdk2_bigframes_sklearn.ipynb",
-        "notebooks/vertex_sdk/sdk2_bigframes_tensorflow.ipynb",
-        "notebooks/visualization/bq_dataframes_covid_line_graphs.ipynb",
         # The experimental notebooks imagine features that don't yet
         # exist or only exist as temporary prototypes.
         "notebooks/experimental/longer_ml_demo.ipynb",
@@ -705,9 +695,9 @@ def notebook(session: nox.Session):
         for nb, regions in notebooks_reg.items()
     }
 
-    # For some reason nbmake exits silently with "no tests ran" message if
+    # pytest with --nbmake exits silently with a "no tests ran" message if
     # one of the notebook paths supplied does not exist. Let's make sure that
-    # each path exists
+    # each path exists.
     for nb in notebooks + list(notebooks_reg):
         assert os.path.exists(nb), nb
 
@@ -719,16 +709,25 @@ def notebook(session: nox.Session):
     pytest_command = [
         "py.test",
         "--nbmake",
-        "--nbmake-timeout=600",
+        "--nbmake-timeout=900",  # 15 minutes
    ]
 
-    # Run self-contained notebooks in single session.run
-    # achieve parallelization via -n
-    session.run(
-        *pytest_command,
-        "-nauto",
-        *notebooks,
-    )
+    try:
+        # Populate notebook parameters and make a backup so that the notebooks
+        # are runnable.
+        session.run("scripts/notebooks_fill_params.py", *notebooks)
+
+        # Run the self-contained notebooks in a single session.run;
+        # parallelization is achieved via the pytest-xdist -n flag.
+        session.run(
+            *pytest_command,
+            "-nauto",
+            *notebooks,
+        )
+    finally:
+        # Prevent our notebook changes from getting checked in to git
+        # accidentally.
+        session.run("scripts/notebooks_restore_from_backup.py", *notebooks)
 
     # Run regionalized notebooks in parallel session.run's, since each notebook
     # takes a different region via env param.
diff --git a/scripts/notebooks_fill_params.py b/scripts/notebooks_fill_params.py
new file mode 100644
index 0000000000..193b96e64c
--- /dev/null
+++ b/scripts/notebooks_fill_params.py
@@ -0,0 +1,65 @@
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import json
+import os
+import re
+import shutil
+import sys
+
+GOOGLE_CLOUD_PROJECT = os.environ["GOOGLE_CLOUD_PROJECT"]
+
+
+def make_backup(notebook_path: str):
+    shutil.copy(
+        notebook_path,
+        f"{notebook_path}.backup",
+    )
+
+
+def replace_project(line):
+    """
+    Notebooks contain special colab `param {type:"string"}`
+    comments, which make it easy for customers to fill in their
+    own information.
+    """
+    # Make sure we're robust to whitespace differences.
+    cleaned = re.sub(r"\s", "", line)
+    if cleaned == 'PROJECT_ID=""#@param{type:"string"}':
+        return f'PROJECT_ID = "{GOOGLE_CLOUD_PROJECT}" # @param {{type:"string"}}\n'
+    else:
+        return line
+
+
+def replace_params(notebook_path: str):
+    with open(notebook_path, "r", encoding="utf-8") as notebook_file:
+        notebook_json = json.load(notebook_file)
+
+    for cell in notebook_json["cells"]:
+        lines = cell.get("source", [])
+        new_lines = [replace_project(line) for line in lines]
+        cell["source"] = new_lines
+
+    with open(notebook_path, "w", encoding="utf-8") as notebook_file:
+        json.dump(notebook_json, notebook_file, indent=1, ensure_ascii=False)
+
+
+def main(notebook_paths):
+    for notebook_path in notebook_paths:
+        make_backup(notebook_path)
+        replace_params(notebook_path)
+
+
+if __name__ == "__main__":
+    main(sys.argv[1:])
diff --git a/scripts/notebooks_restore_from_backup.py b/scripts/notebooks_restore_from_backup.py
new file mode 100644
index 0000000000..735e127f2e
--- /dev/null
+++ b/scripts/notebooks_restore_from_backup.py
@@ -0,0 +1,32 @@
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import shutil
+import sys
+
+
+def restore_from_backup(notebook_path):
+    shutil.move(
+        f"{notebook_path}.backup",
+        notebook_path,
+    )
+
+
+def main(notebook_paths):
+    for notebook_path in notebook_paths:
+        restore_from_backup(notebook_path)
+
+
+if __name__ == "__main__":
+    main(sys.argv[1:])

From f91ce030c6ff02f98615c7acf9f461c716b15884 Mon Sep 17 00:00:00 2001
From: Tim Swast
Date: Fri, 26 Jan 2024 17:16:40 +0000
Subject: [PATCH 2/9] make restore from backup robust to when the backup
 doesn't exist

---
 scripts/notebooks_restore_from_backup.py | 11 +++++++----
 1 file changed, 7 insertions(+), 4 deletions(-)

diff --git a/scripts/notebooks_restore_from_backup.py b/scripts/notebooks_restore_from_backup.py
index 735e127f2e..4d3e0333e3 100644
--- a/scripts/notebooks_restore_from_backup.py
+++ b/scripts/notebooks_restore_from_backup.py
@@ -12,15 +12,18 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+import pathlib
 import shutil
 import sys
 
 
 def restore_from_backup(notebook_path):
-    shutil.move(
-        f"{notebook_path}.backup",
-        notebook_path,
-    )
+    backup_path = pathlib.Path(f"{notebook_path}.backup")
+    if backup_path.exists():
+        shutil.move(
+            backup_path,
+            notebook_path,
+        )
 
 
 def main(notebook_paths):

From a3ec5466f5e0958006a6817cdc7b9d48fbcd5b9a Mon Sep 17 00:00:00 2001
From: Tim Swast
Date: Fri, 26 Jan 2024 17:20:40 +0000
Subject: [PATCH 3/9] fix path to notebook params scripts

---
 noxfile.py | 12 ++++++++++--
 1 file changed, 10 insertions(+), 2 deletions(-)

diff --git a/noxfile.py b/noxfile.py
index 3e4f494d81..bab8a9b6d6 100644
--- a/noxfile.py
+++ b/noxfile.py
@@ -715,7 +715,11 @@ def notebook(session: nox.Session):
     try:
         # Populate notebook parameters and make a backup so that the notebooks
         # are runnable.
-        session.run("scripts/notebooks_fill_params.py", *notebooks)
+        session.run(
+            "python",
+            CURRENT_DIRECTORY / "scripts" / "notebooks_fill_params.py",
+            *notebooks,
+        )
 
         # Run the self-contained notebooks in a single session.run;
         # parallelization is achieved via the pytest-xdist -n flag.
@@ -727,7 +731,11 @@ def notebook(session: nox.Session):
     finally:
         # Prevent our notebook changes from getting checked in to git
         # accidentally.
-        session.run("scripts/notebooks_restore_from_backup.py", *notebooks)
+        session.run(
+            "python",
+            CURRENT_DIRECTORY / "scripts" / "notebooks_restore_from_backup.py",
+            *notebooks,
+        )
 
     # Run regionalized notebooks in parallel session.run's, since each notebook
     # takes a different region via env param.

From 5904960bfc701fd24277479d17ea637f4d825d03 Mon Sep 17 00:00:00 2001
From: Tim Swast
Date: Fri, 26 Jan 2024 18:25:08 +0000
Subject: [PATCH 4/9] exclude notebooks that need parameters other than
 project_id

---
 noxfile.py | 20 +++++++++++++++++++-
 1 file changed, 19 insertions(+), 1 deletion(-)

diff --git a/noxfile.py b/noxfile.py
index bab8a9b6d6..53349b8a52 100644
--- a/noxfile.py
+++ b/noxfile.py
@@ -661,13 +661,31 @@ def notebook(session: nox.Session):
         )
 
     session.install("-e", ".[all]")
-    session.install("pytest", "pytest-xdist", "pytest-retry", "nbmake")
+    session.install(
+        "pytest", "pytest-xdist", "pytest-retry", "nbmake", "google-cloud-aiplatform"
+    )
 
     notebooks_list = list(Path("notebooks/").glob("*/*.ipynb"))
 
     denylist = [
         # Regionalized testing is manually added later.
         "notebooks/location/regionalized.ipynb",
+        # These notebooks contain special colab `param {type:"string"}`
+        # comments, which make it easy for customers to fill in their
+        # own information.
+        #
+        # With the notebooks_fill_params.py script, we are able to find and
+        # replace the PROJECT_ID parameter, but not the others.
+        #
+        # TODO(ashleyxu): Test these notebooks by replacing parameters with
+        # appropriate values and omitting cleanup logic that may break
+        # our test infrastructure.
+        "notebooks/getting_started/ml_fundamentals_bq_dataframes.ipynb",  # Needs DATASET.
+        "notebooks/regression/bq_dataframes_ml_linear_regression.ipynb",  # Needs DATASET_ID.
+        "notebooks/generative_ai/bq_dataframes_ml_drug_name_generation.ipynb",  # Needs CONNECTION.
+        "notebooks/vertex_sdk/sdk2_bigframes_pytorch.ipynb",  # Needs BUCKET_URI.
+        "notebooks/vertex_sdk/sdk2_bigframes_sklearn.ipynb",  # Needs BUCKET_URI.
+        "notebooks/vertex_sdk/sdk2_bigframes_tensorflow.ipynb",  # Needs BUCKET_URI.
         # The experimental notebooks imagine features that don't yet
         # exist or only exist as temporary prototypes.
         "notebooks/experimental/longer_ml_demo.ipynb",

From b5cf2ed43b576cb87f77d32a1b4626532b839591 Mon Sep 17 00:00:00 2001
From: Tim Swast
Date: Fri, 26 Jan 2024 18:50:07 +0000
Subject: [PATCH 5/9] add missing dependencies

---
 .../getting_started/getting_started_bq_dataframes.ipynb | 8 +++++++-
 noxfile.py                                              | 7 ++++++-
 scripts/notebooks_fill_params.py                        | 2 +-
 3 files changed, 14 insertions(+), 3 deletions(-)

diff --git a/notebooks/getting_started/getting_started_bq_dataframes.ipynb b/notebooks/getting_started/getting_started_bq_dataframes.ipynb
index 18be5e48fd..71a46e3fd7 100644
--- a/notebooks/getting_started/getting_started_bq_dataframes.ipynb
+++ b/notebooks/getting_started/getting_started_bq_dataframes.ipynb
@@ -532,6 +532,10 @@
    },
    "outputs": [],
    "source": [
+    "# BigQuery DataFrames can read directly from GCS.\n",
+    "fn = 'gs://cloud-samples-data/vertex-ai/bigframe/penguins.csv'\n",
+    "\n",
+    "# Or from a local file.\n",
     "# fn = 'penguins.csv'"
    ]
   },
@@ -580,7 +584,9 @@
    },
    "outputs": [],
    "source": [
-    "df_from_local = bf.read_csv(fn)"
+    "# If order is not important, use the \"bigquery\" engine to\n",
+    "# allow BigQuery DataFrames to read directly from GCS.\n",
+    "df_from_local = bf.read_csv(fn, engine=\"bigquery\")"
    ]
   },
   {
diff --git a/noxfile.py b/noxfile.py
index 53349b8a52..3c0a743269 100644
--- a/noxfile.py
+++ b/noxfile.py
@@ -662,7 +662,12 @@ def notebook(session: nox.Session):
 
     session.install("-e", ".[all]")
     session.install(
-        "pytest", "pytest-xdist", "pytest-retry", "nbmake", "google-cloud-aiplatform"
+        "pytest",
+        "pytest-xdist",
+        "pytest-retry",
+        "nbmake",
+        "google-cloud-aiplatform",
+        "matplotlib",
     )
 
     notebooks_list = list(Path("notebooks/").glob("*/*.ipynb"))
diff --git a/scripts/notebooks_fill_params.py b/scripts/notebooks_fill_params.py
index 193b96e64c..e0f7c8d687 100644
--- a/scripts/notebooks_fill_params.py
+++ b/scripts/notebooks_fill_params.py
@@ -52,7 +52,7 @@ def replace_params(notebook_path: str):
         cell["source"] = new_lines
 
     with open(notebook_path, "w", encoding="utf-8") as notebook_file:
-        json.dump(notebook_json, notebook_file, indent=1, ensure_ascii=False)
+        json.dump(notebook_json, notebook_file, indent=2, ensure_ascii=False)
 
 
 def main(notebook_paths):

From 0e8ece0015a4ecb1afc05537b57c8ac11ae9e1de Mon Sep 17 00:00:00 2001
From: Tim Swast
Date: Fri, 26 Jan 2024 19:20:14 +0000
Subject: [PATCH 6/9] notebook testing fixes

---
 .../getting_started/getting_started_bq_dataframes.ipynb | 7 +++++--
 noxfile.py                                              | 1 +
 2 files changed, 6 insertions(+), 2 deletions(-)

diff --git a/notebooks/getting_started/getting_started_bq_dataframes.ipynb b/notebooks/getting_started/getting_started_bq_dataframes.ipynb
index 71a46e3fd7..733a732cc4 100644
--- a/notebooks/getting_started/getting_started_bq_dataframes.ipynb
+++ b/notebooks/getting_started/getting_started_bq_dataframes.ipynb
@@ -664,7 +664,10 @@
    },
    "outputs": [],
    "source": [
-    "df_from_local.to_gbq(PROJECT_ID + \".\" + DATASET_ID + \".penguins\")"
+    "df_from_local.to_gbq(\n",
+    "    PROJECT_ID + \".\" + DATASET_ID + \".penguins\",\n",
+    "    if_exists=\"replace\",\n",
+    ")"
    ]
   },
   {
@@ -777,7 +780,7 @@
    },
    "outputs": [],
    "source": [
-    "bq_df[\"species\", \"body_mass_g\"].groupby(by=bq_df[\"species\"]).mean(numeric_only=True).head()"
+    "bq_df[[\"species\", \"body_mass_g\"]].groupby(by=bq_df[\"species\"]).mean(numeric_only=True).head()"
    ]
   },
   {
diff --git a/noxfile.py b/noxfile.py
index 3c0a743269..0b187e2884 100644
--- a/noxfile.py
+++ b/noxfile.py
@@ -668,6 +668,7 @@ def notebook(session: nox.Session):
         "nbmake",
         "google-cloud-aiplatform",
         "matplotlib",
+        "seaborn",
     )
 
     notebooks_list = list(Path("notebooks/").glob("*/*.ipynb"))

From dfee838b5c41ed6df51861a023454c4b1bbfc689 Mon Sep 17 00:00:00 2001
From: Tim Swast
Date: Mon, 29 Jan 2024 16:31:00 +0000
Subject: [PATCH 7/9] add sleep to avoid some bucket flakiness

---
 .../bq_dataframes_llm_code_generation.ipynb | 68 +++++++++++++++++--
 1 file changed, 61 insertions(+), 7 deletions(-)

diff --git a/notebooks/generative_ai/bq_dataframes_llm_code_generation.ipynb b/notebooks/generative_ai/bq_dataframes_llm_code_generation.ipynb
index b2966c404c..ffc1adf93b 100644
--- a/notebooks/generative_ai/bq_dataframes_llm_code_generation.ipynb
+++ b/notebooks/generative_ai/bq_dataframes_llm_code_generation.ipynb
@@ -370,6 +370,11 @@
     "This section walks through a few steps required in order to use the model in your notebook."
    ]
   },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": []
+  },
   {
    "cell_type": "markdown",
    "metadata": {
@@ -670,11 +675,19 @@
   },
   {
    "cell_type": "code",
-   "execution_count": null,
+   "execution_count": 1,
    "metadata": {
     "id": "-J5LHgS6LLZ0"
    },
-   "outputs": [],
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Creating gs://code-samples-8094e844-bec3-11ee-8298-4201c0a8181f/...\n"
+     ]
+    }
+   ],
    "source": [
     "import uuid\n",
     "BUCKET_ID = \"code-samples-\" + str(uuid.uuid1())\n",
     "\n",
     "!gsutil mb gs://{BUCKET_ID}"
    ]
   },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "import time\n",
+    "\n",
+    "# Wait one minute so that the permissions on the new bucket are up-to-date.\n",
+    "# https://cloud.google.com/storage/docs/consistency#eventually_consistent_operations\n",
+    "time.sleep(60)"
+   ]
+  },
   {
    "cell_type": "markdown",
    "metadata": {
@@ -785,16 +811,32 @@
   },
   {
    "cell_type": "code",
-   "execution_count": null,
+   "execution_count": 2,
    "metadata": {
     "id": "iQFo6OUBLmi3"
    },
-   "outputs": [],
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Removing gs://code-samples-8094e844-bec3-11ee-8298-4201c0a8181f/...\n",
+      "Deleted bucket 'code-samples-8094e844-bec3-11ee-8298-4201c0a8181f'.\n"
+     ]
+    }
+   ],
    "source": [
-    "# # Delete the Google Cloud Storage bucket and files\n",
-    "# ! gsutil rm -r gs://{BUCKET_ID}\n",
-    "# print(f\"Deleted bucket '{BUCKET_ID}'.\")"
+    "# Delete the Google Cloud Storage bucket and files\n",
+    "! gsutil rm -r gs://{BUCKET_ID}\n",
+    "print(f\"Deleted bucket '{BUCKET_ID}'.\")"
    ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": []
   }
  ],
 "metadata": {
@@ -805,6 +847,18 @@
   "kernelspec": {
    "display_name": "Python 3",
    "name": "python3"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 3
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython3",
+   "version": "3.10.9"
   }
  },
 "nbformat": 4,

From c92b9da355a9da5af1d86dba721c0b70cc380b8e Mon Sep 17 00:00:00 2001
From: Tim Swast
Date: Mon, 29 Jan 2024 17:31:55 +0000
Subject: [PATCH 8/9] Revert "add sleep to avoid some bucket flakiness"

This reverts commit dfee838b5c41ed6df51861a023454c4b1bbfc689.

---
 .../bq_dataframes_llm_code_generation.ipynb | 68 ++-----------------
 1 file changed, 7 insertions(+), 61 deletions(-)

diff --git a/notebooks/generative_ai/bq_dataframes_llm_code_generation.ipynb b/notebooks/generative_ai/bq_dataframes_llm_code_generation.ipynb
index ffc1adf93b..b2966c404c 100644
--- a/notebooks/generative_ai/bq_dataframes_llm_code_generation.ipynb
+++ b/notebooks/generative_ai/bq_dataframes_llm_code_generation.ipynb
@@ -370,11 +370,6 @@
     "This section walks through a few steps required in order to use the model in your notebook."
    ]
   },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": []
-  },
   {
    "cell_type": "markdown",
    "metadata": {
@@ -675,19 +670,11 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 1,
+   "execution_count": null,
    "metadata": {
     "id": "-J5LHgS6LLZ0"
    },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "Creating gs://code-samples-8094e844-bec3-11ee-8298-4201c0a8181f/...\n"
-     ]
-    }
-   ],
+   "outputs": [],
    "source": [
     "import uuid\n",
     "BUCKET_ID = \"code-samples-\" + str(uuid.uuid1())\n",
     "\n",
     "!gsutil mb gs://{BUCKET_ID}"
    ]
   },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "import time\n",
-    "\n",
-    "# Wait one minute so that the permissions on the new bucket are up-to-date.\n",
-    "# https://cloud.google.com/storage/docs/consistency#eventually_consistent_operations\n",
-    "time.sleep(60)"
-   ]
-  },
   {
    "cell_type": "markdown",
    "metadata": {
@@ -811,32 +785,16 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 2,
+   "execution_count": null,
    "metadata": {
     "id": "iQFo6OUBLmi3"
    },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "Removing gs://code-samples-8094e844-bec3-11ee-8298-4201c0a8181f/...\n",
-      "Deleted bucket 'code-samples-8094e844-bec3-11ee-8298-4201c0a8181f'.\n"
-     ]
-    }
-   ],
+   "outputs": [],
    "source": [
-    "# Delete the Google Cloud Storage bucket and files\n",
-    "! gsutil rm -r gs://{BUCKET_ID}\n",
-    "print(f\"Deleted bucket '{BUCKET_ID}'.\")"
+    "# # Delete the Google Cloud Storage bucket and files\n",
+    "# ! gsutil rm -r gs://{BUCKET_ID}\n",
+    "# print(f\"Deleted bucket '{BUCKET_ID}'.\")"
    ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": []
   }
  ],
 "metadata": {
@@ -847,18 +805,6 @@
   "kernelspec": {
    "display_name": "Python 3",
    "name": "python3"
-  },
-  "language_info": {
-   "codemirror_mode": {
-    "name": "ipython",
-    "version": 3
-   },
-   "file_extension": ".py",
-   "mimetype": "text/x-python",
-   "name": "python",
-   "nbconvert_exporter": "python",
-   "pygments_lexer": "ipython3",
-   "version": "3.10.9"
   }
  },
 "nbformat": 4,

From e5d9191e0ff9c613976d851d40840a0221b2603b Mon Sep 17 00:00:00 2001
From: Tim Swast
Date: Mon, 29 Jan 2024 17:33:55 +0000
Subject: [PATCH 9/9] exclude bq_dataframes_llm_code_generation sample

---
 noxfile.py | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/noxfile.py b/noxfile.py
index 0b187e2884..657f4a7173 100644
--- a/noxfile.py
+++ b/noxfile.py
@@ -689,6 +689,9 @@ def notebook(session: nox.Session):
         "notebooks/getting_started/ml_fundamentals_bq_dataframes.ipynb",  # Needs DATASET.
         "notebooks/regression/bq_dataframes_ml_linear_regression.ipynb",  # Needs DATASET_ID.
         "notebooks/generative_ai/bq_dataframes_ml_drug_name_generation.ipynb",  # Needs CONNECTION.
+        # TODO(swast): investigate why we get 404 errors, even though
+        # bq_dataframes_llm_code_generation creates a bucket in the sample.
+        "notebooks/generative_ai/bq_dataframes_llm_code_generation.ipynb",  # Needs BUCKET_URI.
        "notebooks/vertex_sdk/sdk2_bigframes_pytorch.ipynb",  # Needs BUCKET_URI.
        "notebooks/vertex_sdk/sdk2_bigframes_sklearn.ipynb",  # Needs BUCKET_URI.
        "notebooks/vertex_sdk/sdk2_bigframes_tensorflow.ipynb",  # Needs BUCKET_URI.
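
A note on the param-replacement contract this series hinges on: replace_project only
rewrites the PROJECT_ID line, matching it after stripping all whitespace, which is why
notebooks that need DATASET, DATASET_ID, CONNECTION, or BUCKET_URI parameters stay on
the denylist. A minimal pytest sketch of that contract follows; it is not part of the
series, and the test file location, the "test-project" value, and the sys.path handling
are assumptions for illustration:

import os
import sys

# Must be set before import: the script reads GOOGLE_CLOUD_PROJECT at module load time.
os.environ["GOOGLE_CLOUD_PROJECT"] = "test-project"

# Assumes the test is run from the repository root, where scripts/ lives.
sys.path.insert(0, "scripts")

import notebooks_fill_params


def test_replace_project_is_whitespace_insensitive():
    # Extra spaces around "=" and "#" should not prevent the match.
    line = 'PROJECT_ID =  ""   # @param {type:"string"}\n'
    got = notebooks_fill_params.replace_project(line)
    assert got.startswith('PROJECT_ID = "test-project"')
    assert '@param {type:"string"}' in got


def test_replace_project_leaves_other_params_alone():
    # Only PROJECT_ID is handled; DATASET_ID and friends pass through unchanged,
    # which is why those notebooks remain excluded in noxfile.py.
    line = 'DATASET_ID = ""  # @param {type:"string"}\n'
    assert notebooks_fill_params.replace_project(line) == line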