diff --git a/.github/sync-repo-settings.yaml b/.github/sync-repo-settings.yaml
new file mode 100644
index 00000000..af599353
--- /dev/null
+++ b/.github/sync-repo-settings.yaml
@@ -0,0 +1,13 @@
+# https://github.com/googleapis/repo-automation-bots/tree/master/packages/sync-repo-settings
+# Rules for master branch protection
+branchProtectionRules:
+# Identifies the protection rule pattern. Name of the branch to be protected.
+# Defaults to `master`
+- pattern: master
+ requiredStatusCheckContexts:
+ - 'Kokoro'
+ - 'cla/google'
+ - 'Samples - Lint'
+ - 'Samples - Python 3.6'
+ - 'Samples - Python 3.7'
+ - 'Samples - Python 3.8'
diff --git a/.kokoro/docs/common.cfg b/.kokoro/docs/common.cfg
index 03b21293..948ebed5 100644
--- a/.kokoro/docs/common.cfg
+++ b/.kokoro/docs/common.cfg
@@ -30,7 +30,7 @@ env_vars: {
env_vars: {
key: "V2_STAGING_BUCKET"
- value: "docs-staging-v2-staging"
+ value: "docs-staging-v2"
}
# It will upload the docker image after successful builds.
diff --git a/.kokoro/samples/python3.6/common.cfg b/.kokoro/samples/python3.6/common.cfg
index 84052e6f..d92ddf8d 100644
--- a/.kokoro/samples/python3.6/common.cfg
+++ b/.kokoro/samples/python3.6/common.cfg
@@ -13,6 +13,12 @@ env_vars: {
value: "py-3.6"
}
+# Declare build specific Cloud project.
+env_vars: {
+ key: "BUILD_SPECIFIC_GCLOUD_PROJECT"
+ value: "python-docs-samples-tests-py36"
+}
+
env_vars: {
key: "TRAMPOLINE_BUILD_FILE"
value: "github/python-bigquery-datatransfer/.kokoro/test-samples.sh"
diff --git a/.kokoro/samples/python3.7/common.cfg b/.kokoro/samples/python3.7/common.cfg
index 147291a2..8c221a6e 100644
--- a/.kokoro/samples/python3.7/common.cfg
+++ b/.kokoro/samples/python3.7/common.cfg
@@ -13,6 +13,12 @@ env_vars: {
value: "py-3.7"
}
+# Declare build specific Cloud project.
+env_vars: {
+ key: "BUILD_SPECIFIC_GCLOUD_PROJECT"
+ value: "python-docs-samples-tests-py37"
+}
+
env_vars: {
key: "TRAMPOLINE_BUILD_FILE"
value: "github/python-bigquery-datatransfer/.kokoro/test-samples.sh"
diff --git a/.kokoro/samples/python3.8/common.cfg b/.kokoro/samples/python3.8/common.cfg
index b447948a..fa5c7d2f 100644
--- a/.kokoro/samples/python3.8/common.cfg
+++ b/.kokoro/samples/python3.8/common.cfg
@@ -13,6 +13,12 @@ env_vars: {
value: "py-3.8"
}
+# Declare build specific Cloud project.
+env_vars: {
+ key: "BUILD_SPECIFIC_GCLOUD_PROJECT"
+ value: "python-docs-samples-tests-py38"
+}
+
env_vars: {
key: "TRAMPOLINE_BUILD_FILE"
value: "github/python-bigquery-datatransfer/.kokoro/test-samples.sh"
diff --git a/.kokoro/test-samples.sh b/.kokoro/test-samples.sh
index e5eb712a..449266b8 100755
--- a/.kokoro/test-samples.sh
+++ b/.kokoro/test-samples.sh
@@ -28,6 +28,12 @@ if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then
git checkout $LATEST_RELEASE
fi
+# Exit early if samples directory doesn't exist
+if [ ! -d "./samples" ]; then
+ echo "No tests run. `./samples` not found"
+ exit 0
+fi
+
# Disable buffering, so that the logs stream through.
export PYTHONUNBUFFERED=1
@@ -101,4 +107,4 @@ cd "$ROOT"
# Workaround for Kokoro permissions issue: delete secrets
rm testing/{test-env.sh,client-secrets.json,service-account.json}
-exit "$RTN"
\ No newline at end of file
+exit "$RTN"
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
new file mode 100644
index 00000000..6ad83346
--- /dev/null
+++ b/.pre-commit-config.yaml
@@ -0,0 +1,17 @@
+# See https://pre-commit.com for more information
+# See https://pre-commit.com/hooks.html for more hooks
+repos:
+- repo: https://github.com/pre-commit/pre-commit-hooks
+ rev: v3.3.0
+ hooks:
+ - id: trailing-whitespace
+ - id: end-of-file-fixer
+ - id: check-yaml
+- repo: https://github.com/psf/black
+ rev: 19.10b0
+ hooks:
+ - id: black
+- repo: https://gitlab.com/pycqa/flake8
+ rev: 3.8.4
+ hooks:
+ - id: flake8
diff --git a/CHANGELOG.md b/CHANGELOG.md
index e748e9d7..01ff2728 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -4,6 +4,27 @@
[1]: https://pypi.org/project/google-cloud-bigquery-datatransfer/#history
+## [3.0.0](https://www.github.com/googleapis/python-bigquery-datatransfer/compare/v2.1.0...v3.0.0) (2020-12-09)
+
+
+### ⚠ BREAKING CHANGES
+
+* type is renamed to type_ to avoid conflict with built-in functions (introduced in googleapis/gapic-generator-python#595)
+
+### Features
+
+* add common resource path helpers ([#69](https://www.github.com/googleapis/python-bigquery-datatransfer/issues/69)) ([e0bcedb](https://www.github.com/googleapis/python-bigquery-datatransfer/commit/e0bcedb58109e38a58584d5b3087f03e1fa10835))
+
+
+### Bug Fixes
+
+* avoid collision with built-in functions by renaming type property to type_ ([#53](https://www.github.com/googleapis/python-bigquery-datatransfer/issues/53)) ([b954411](https://www.github.com/googleapis/python-bigquery-datatransfer/commit/b95441140f7c86dd3e833aef0532badd6280ef48)), closes [/github.com/googleapis/python-talent/blob/ef045e8eb348db36d7a2a611e6f26b11530d273b/samples/snippets/noxfile_config.py#L27-L32](https://www.github.com/googleapis//github.com/googleapis/python-talent/blob/ef045e8eb348db36d7a2a611e6f26b11530d273b/samples/snippets/noxfile_config.py/issues/L27-L32)
+
+
+### Documentation
+
+* update intersphinx links ([#78](https://www.github.com/googleapis/python-bigquery-datatransfer/issues/78)) ([a78ba39](https://www.github.com/googleapis/python-bigquery-datatransfer/commit/a78ba39bf1507cbc9e2a51fe4553d602da7f7601))
+
## 2.1.0
09-29-2020 09:34 PDT
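The `type` → `type_` rename is the breaking change that drives the major version bump. A minimal sketch of the migration in calling code (the enum value is chosen for illustration):

```python
from google.cloud import bigquery_datatransfer_v1

param = bigquery_datatransfer_v1.types.DataSourceParameter()
# 2.x exposed this protobuf field as `param.type`; from 3.0.0 it is
# `param.type_`, so the attribute no longer shadows the `type` built-in.
param.type_ = bigquery_datatransfer_v1.types.DataSourceParameter.Type.STRING
```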
diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md
index b3d1f602..039f4368 100644
--- a/CODE_OF_CONDUCT.md
+++ b/CODE_OF_CONDUCT.md
@@ -1,44 +1,95 @@
-# Contributor Code of Conduct
+# Code of Conduct
-As contributors and maintainers of this project,
-and in the interest of fostering an open and welcoming community,
-we pledge to respect all people who contribute through reporting issues,
-posting feature requests, updating documentation,
-submitting pull requests or patches, and other activities.
+## Our Pledge
-We are committed to making participation in this project
-a harassment-free experience for everyone,
-regardless of level of experience, gender, gender identity and expression,
-sexual orientation, disability, personal appearance,
-body size, race, ethnicity, age, religion, or nationality.
+In the interest of fostering an open and welcoming environment, we as
+contributors and maintainers pledge to making participation in our project and
+our community a harassment-free experience for everyone, regardless of age, body
+size, disability, ethnicity, gender identity and expression, level of
+experience, education, socio-economic status, nationality, personal appearance,
+race, religion, or sexual identity and orientation.
+
+## Our Standards
+
+Examples of behavior that contributes to creating a positive environment
+include:
+
+* Using welcoming and inclusive language
+* Being respectful of differing viewpoints and experiences
+* Gracefully accepting constructive criticism
+* Focusing on what is best for the community
+* Showing empathy towards other community members
Examples of unacceptable behavior by participants include:
-* The use of sexualized language or imagery
-* Personal attacks
-* Trolling or insulting/derogatory comments
-* Public or private harassment
-* Publishing other's private information,
-such as physical or electronic
-addresses, without explicit permission
-* Other unethical or unprofessional conduct.
+* The use of sexualized language or imagery and unwelcome sexual attention or
+ advances
+* Trolling, insulting/derogatory comments, and personal or political attacks
+* Public or private harassment
+* Publishing others' private information, such as a physical or electronic
+ address, without explicit permission
+* Other conduct which could reasonably be considered inappropriate in a
+ professional setting
+
+## Our Responsibilities
+
+Project maintainers are responsible for clarifying the standards of acceptable
+behavior and are expected to take appropriate and fair corrective action in
+response to any instances of unacceptable behavior.
Project maintainers have the right and responsibility to remove, edit, or reject
-comments, commits, code, wiki edits, issues, and other contributions
-that are not aligned to this Code of Conduct.
-By adopting this Code of Conduct,
-project maintainers commit themselves to fairly and consistently
-applying these principles to every aspect of managing this project.
-Project maintainers who do not follow or enforce the Code of Conduct
-may be permanently removed from the project team.
-
-This code of conduct applies both within project spaces and in public spaces
-when an individual is representing the project or its community.
-
-Instances of abusive, harassing, or otherwise unacceptable behavior
-may be reported by opening an issue
-or contacting one or more of the project maintainers.
-
-This Code of Conduct is adapted from the [Contributor Covenant](http://contributor-covenant.org), version 1.2.0,
-available at [http://contributor-covenant.org/version/1/2/0/](http://contributor-covenant.org/version/1/2/0/)
+comments, commits, code, wiki edits, issues, and other contributions that are
+not aligned to this Code of Conduct, or to ban temporarily or permanently any
+contributor for other behaviors that they deem inappropriate, threatening,
+offensive, or harmful.
+
+## Scope
+
+This Code of Conduct applies both within project spaces and in public spaces
+when an individual is representing the project or its community. Examples of
+representing a project or community include using an official project e-mail
+address, posting via an official social media account, or acting as an appointed
+representative at an online or offline event. Representation of a project may be
+further defined and clarified by project maintainers.
+
+This Code of Conduct also applies outside the project spaces when the Project
+Steward has a reasonable belief that an individual's behavior may have a
+negative impact on the project or its community.
+
+## Conflict Resolution
+
+We do not believe that all conflict is bad; healthy debate and disagreement
+often yield positive results. However, it is never okay to be disrespectful or
+to engage in behavior that violates the project’s code of conduct.
+
+If you see someone violating the code of conduct, you are encouraged to address
+the behavior directly with those involved. Many issues can be resolved quickly
+and easily, and this gives people more control over the outcome of their
+dispute. If you are unable to resolve the matter for any reason, or if the
+behavior is threatening or harassing, report it. We are dedicated to providing
+an environment where participants feel welcome and safe.
+
+
+Reports should be directed to *googleapis-stewards@google.com*, the
+Project Steward(s) for *Google Cloud Client Libraries*. It is the Project Steward’s duty to
+receive and address reported violations of the code of conduct. They will then
+work with a committee consisting of representatives from the Open Source
+Programs Office and the Google Open Source Strategy team. If for any reason you
+are uncomfortable reaching out to the Project Steward, please email
+opensource@google.com.
+
+We will investigate every complaint, but you may not receive a direct response.
+We will use our discretion in determining when and how to follow up on reported
+incidents, which may range from not taking action to permanent expulsion from
+the project and project-sponsored spaces. We will notify the accused of the
+report and provide them an opportunity to discuss it before any action is taken.
+The identity of the reporter will be omitted from the details of the report
+supplied to the accused. In potentially harmful situations, such as ongoing
+harassment or threats to anyone's safety, we may take action without notice.
+
+## Attribution
+
+This Code of Conduct is adapted from the Contributor Covenant, version 1.4,
+available at
+https://www.contributor-covenant.org/version/1/4/code-of-conduct.html
\ No newline at end of file
diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst
index 7e25e6bc..7a6ff4a5 100644
--- a/CONTRIBUTING.rst
+++ b/CONTRIBUTING.rst
@@ -111,6 +111,16 @@ Coding Style
should point to the official ``googleapis`` checkout and
the branch should be the main branch on that remote (``master``).
+- This repository contains configuration for the
+ `pre-commit <https://pre-commit.com/>`__ tool, which automates checking
+ our linters during a commit. If you have it installed on your ``$PATH``,
+ you can enable enforcing those checks via:
+
+.. code-block:: bash
+
+ $ pre-commit install
+ pre-commit installed at .git/hooks/pre-commit
+
Exceptions to PEP8:
- Many unit tests use a helper method, ``_call_fut`` ("FUT" is short for
diff --git a/docs/bigquery_datatransfer_v1/types.rst b/docs/bigquery_datatransfer_v1/types.rst
index e47782ff..ccda83a5 100644
--- a/docs/bigquery_datatransfer_v1/types.rst
+++ b/docs/bigquery_datatransfer_v1/types.rst
@@ -3,3 +3,4 @@ Types for Google Cloud Bigquery Datatransfer v1 API
.. automodule:: google.cloud.bigquery_datatransfer_v1.types
:members:
+ :show-inheritance:
diff --git a/docs/conf.py b/docs/conf.py
index fab78815..8e9ba316 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -345,10 +345,11 @@
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {
- "python": ("http://python.readthedocs.org/en/latest/", None),
- "google-auth": ("https://google-auth.readthedocs.io/en/stable", None),
+ "python": ("https://python.readthedocs.org/en/latest/", None),
+ "google-auth": ("https://googleapis.dev/python/google-auth/latest/", None),
"google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None,),
- "grpc": ("https://grpc.io/grpc/python/", None),
+ "grpc": ("https://grpc.github.io/grpc/python/", None),
+ "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None),
}
diff --git a/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/async_client.py b/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/async_client.py
index da83e37e..278ebbc5 100644
--- a/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/async_client.py
+++ b/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/async_client.py
@@ -54,14 +54,58 @@ class DataTransferServiceAsyncClient:
DEFAULT_ENDPOINT = DataTransferServiceClient.DEFAULT_ENDPOINT
DEFAULT_MTLS_ENDPOINT = DataTransferServiceClient.DEFAULT_MTLS_ENDPOINT
+ data_source_path = staticmethod(DataTransferServiceClient.data_source_path)
+ parse_data_source_path = staticmethod(
+ DataTransferServiceClient.parse_data_source_path
+ )
+ run_path = staticmethod(DataTransferServiceClient.run_path)
+ parse_run_path = staticmethod(DataTransferServiceClient.parse_run_path)
transfer_config_path = staticmethod(DataTransferServiceClient.transfer_config_path)
parse_transfer_config_path = staticmethod(
DataTransferServiceClient.parse_transfer_config_path
)
+ common_billing_account_path = staticmethod(
+ DataTransferServiceClient.common_billing_account_path
+ )
+ parse_common_billing_account_path = staticmethod(
+ DataTransferServiceClient.parse_common_billing_account_path
+ )
+
+ common_folder_path = staticmethod(DataTransferServiceClient.common_folder_path)
+ parse_common_folder_path = staticmethod(
+ DataTransferServiceClient.parse_common_folder_path
+ )
+
+ common_organization_path = staticmethod(
+ DataTransferServiceClient.common_organization_path
+ )
+ parse_common_organization_path = staticmethod(
+ DataTransferServiceClient.parse_common_organization_path
+ )
+
+ common_project_path = staticmethod(DataTransferServiceClient.common_project_path)
+ parse_common_project_path = staticmethod(
+ DataTransferServiceClient.parse_common_project_path
+ )
+
+ common_location_path = staticmethod(DataTransferServiceClient.common_location_path)
+ parse_common_location_path = staticmethod(
+ DataTransferServiceClient.parse_common_location_path
+ )
+
from_service_account_file = DataTransferServiceClient.from_service_account_file
from_service_account_json = from_service_account_file
+ @property
+ def transport(self) -> DataTransferServiceTransport:
+ """Return the transport used by the client instance.
+
+ Returns:
+ DataTransferServiceTransport: The transport used by the client instance.
+ """
+ return self._client.transport
+
get_transport_class = functools.partial(
type(DataTransferServiceClient).get_transport_class,
type(DataTransferServiceClient),
@@ -156,7 +200,8 @@ async def get_data_source(
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
- if request is not None and any([name]):
+ has_flattened_params = any([name])
+ if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
@@ -179,7 +224,7 @@ async def get_data_source(
maximum=60.0,
multiplier=1.3,
predicate=retries.if_exception_type(
- exceptions.ServiceUnavailable, exceptions.DeadlineExceeded,
+ exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
),
),
default_timeout=20.0,
@@ -241,7 +286,8 @@ async def list_data_sources(
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
- if request is not None and any([parent]):
+ has_flattened_params = any([parent])
+ if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
@@ -264,7 +310,7 @@ async def list_data_sources(
maximum=60.0,
multiplier=1.3,
predicate=retries.if_exception_type(
- exceptions.ServiceUnavailable, exceptions.DeadlineExceeded,
+ exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
),
),
default_timeout=20.0,
@@ -348,7 +394,8 @@ async def create_transfer_config(
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
- if request is not None and any([parent, transfer_config]):
+ has_flattened_params = any([parent, transfer_config])
+ if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
@@ -436,7 +483,8 @@ async def update_transfer_config(
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
- if request is not None and any([transfer_config, update_mask]):
+ has_flattened_params = any([transfer_config, update_mask])
+ if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
@@ -509,7 +557,8 @@ async def delete_transfer_config(
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
- if request is not None and any([name]):
+ has_flattened_params = any([name])
+ if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
@@ -532,7 +581,7 @@ async def delete_transfer_config(
maximum=60.0,
multiplier=1.3,
predicate=retries.if_exception_type(
- exceptions.ServiceUnavailable, exceptions.DeadlineExceeded,
+ exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
),
),
default_timeout=20.0,
@@ -594,7 +643,8 @@ async def get_transfer_config(
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
- if request is not None and any([name]):
+ has_flattened_params = any([name])
+ if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
@@ -617,7 +667,7 @@ async def get_transfer_config(
maximum=60.0,
multiplier=1.3,
predicate=retries.if_exception_type(
- exceptions.ServiceUnavailable, exceptions.DeadlineExceeded,
+ exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
),
),
default_timeout=20.0,
@@ -678,7 +728,8 @@ async def list_transfer_configs(
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
- if request is not None and any([parent]):
+ has_flattened_params = any([parent])
+ if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
@@ -701,7 +752,7 @@ async def list_transfer_configs(
maximum=60.0,
multiplier=1.3,
predicate=retries.if_exception_type(
- exceptions.ServiceUnavailable, exceptions.DeadlineExceeded,
+ exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
),
),
default_timeout=20.0,
@@ -782,7 +833,8 @@ async def schedule_transfer_runs(
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
- if request is not None and any([parent, start_time, end_time]):
+ has_flattened_params = any([parent, start_time, end_time])
+ if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
@@ -913,7 +965,8 @@ async def get_transfer_run(
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
- if request is not None and any([name]):
+ has_flattened_params = any([name])
+ if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
@@ -936,7 +989,7 @@ async def get_transfer_run(
maximum=60.0,
multiplier=1.3,
predicate=retries.if_exception_type(
- exceptions.ServiceUnavailable, exceptions.DeadlineExceeded,
+ exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
),
),
default_timeout=20.0,
@@ -989,7 +1042,8 @@ async def delete_transfer_run(
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
- if request is not None and any([name]):
+ has_flattened_params = any([name])
+ if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
@@ -1012,7 +1066,7 @@ async def delete_transfer_run(
maximum=60.0,
multiplier=1.3,
predicate=retries.if_exception_type(
- exceptions.ServiceUnavailable, exceptions.DeadlineExceeded,
+ exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
),
),
default_timeout=20.0,
@@ -1075,7 +1129,8 @@ async def list_transfer_runs(
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
- if request is not None and any([parent]):
+ has_flattened_params = any([parent])
+ if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
@@ -1098,7 +1153,7 @@ async def list_transfer_runs(
maximum=60.0,
multiplier=1.3,
predicate=retries.if_exception_type(
- exceptions.ServiceUnavailable, exceptions.DeadlineExceeded,
+ exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
),
),
default_timeout=20.0,
@@ -1166,7 +1221,8 @@ async def list_transfer_logs(
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
- if request is not None and any([parent]):
+ has_flattened_params = any([parent])
+ if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
@@ -1189,7 +1245,7 @@ async def list_transfer_logs(
maximum=60.0,
multiplier=1.3,
predicate=retries.if_exception_type(
- exceptions.ServiceUnavailable, exceptions.DeadlineExceeded,
+ exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
),
),
default_timeout=20.0,
@@ -1265,7 +1321,8 @@ async def check_valid_creds(
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
- if request is not None and any([name]):
+ has_flattened_params = any([name])
+ if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
@@ -1288,7 +1345,7 @@ async def check_valid_creds(
maximum=60.0,
multiplier=1.3,
predicate=retries.if_exception_type(
- exceptions.ServiceUnavailable, exceptions.DeadlineExceeded,
+ exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
),
),
default_timeout=20.0,
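Naming the intermediate `has_flattened_params` value changes nothing functionally; it just makes the guard readable in tracebacks. A sketch of the behavior it enforces, using anonymous credentials so no real project is needed (resource name illustrative):

```python
from google.auth import credentials
from google.cloud import bigquery_datatransfer_v1 as bqdt

client = bqdt.DataTransferServiceClient(
    credentials=credentials.AnonymousCredentials()
)
name = "projects/my-project/dataSources/my_source"  # illustrative
try:
    # Mixing a `request` object with a flattened field raises before
    # any RPC is attempted.
    client.get_data_source(
        request=bqdt.types.GetDataSourceRequest(name=name), name=name
    )
except ValueError as exc:
    print(exc)
```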
diff --git a/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/client.py b/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/client.py
index f4cd6199..38d47596 100644
--- a/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/client.py
+++ b/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/client.py
@@ -19,10 +19,10 @@
from distutils import util
import os
import re
-from typing import Callable, Dict, Sequence, Tuple, Type, Union
+from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union
import pkg_resources
-import google.api_core.client_options as ClientOptions # type: ignore
+from google.api_core import client_options as client_options_lib # type: ignore
from google.api_core import exceptions # type: ignore
from google.api_core import gapic_v1 # type: ignore
from google.api_core import retry as retries # type: ignore
@@ -142,6 +142,46 @@ def from_service_account_file(cls, filename: str, *args, **kwargs):
from_service_account_json = from_service_account_file
+ @property
+ def transport(self) -> DataTransferServiceTransport:
+ """Return the transport used by the client instance.
+
+ Returns:
+ DataTransferServiceTransport: The transport used by the client instance.
+ """
+ return self._transport
+
+ @staticmethod
+ def data_source_path(project: str, data_source: str,) -> str:
+ """Return a fully-qualified data_source string."""
+ return "projects/{project}/dataSources/{data_source}".format(
+ project=project, data_source=data_source,
+ )
+
+ @staticmethod
+ def parse_data_source_path(path: str) -> Dict[str, str]:
+ """Parse a data_source path into its component segments."""
+ m = re.match(
+ r"^projects/(?P.+?)/dataSources/(?P.+?)$", path
+ )
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def run_path(project: str, transfer_config: str, run: str,) -> str:
+ """Return a fully-qualified run string."""
+ return "projects/{project}/transferConfigs/{transfer_config}/runs/{run}".format(
+ project=project, transfer_config=transfer_config, run=run,
+ )
+
+ @staticmethod
+ def parse_run_path(path: str) -> Dict[str, str]:
+ """Parse a run path into its component segments."""
+ m = re.match(
+ r"^projects/(?P.+?)/transferConfigs/(?P.+?)/runs/(?P.+?)$",
+ path,
+ )
+ return m.groupdict() if m else {}
+
@staticmethod
def transfer_config_path(project: str, transfer_config: str,) -> str:
"""Return a fully-qualified transfer_config string."""
@@ -158,12 +198,71 @@ def parse_transfer_config_path(path: str) -> Dict[str, str]:
)
return m.groupdict() if m else {}
+ @staticmethod
+ def common_billing_account_path(billing_account: str,) -> str:
+ """Return a fully-qualified billing_account string."""
+ return "billingAccounts/{billing_account}".format(
+ billing_account=billing_account,
+ )
+
+ @staticmethod
+ def parse_common_billing_account_path(path: str) -> Dict[str, str]:
+ """Parse a billing_account path into its component segments."""
+ m = re.match(r"^billingAccounts/(?P.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_folder_path(folder: str,) -> str:
+ """Return a fully-qualified folder string."""
+ return "folders/{folder}".format(folder=folder,)
+
+ @staticmethod
+ def parse_common_folder_path(path: str) -> Dict[str, str]:
+ """Parse a folder path into its component segments."""
+ m = re.match(r"^folders/(?P.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_organization_path(organization: str,) -> str:
+ """Return a fully-qualified organization string."""
+ return "organizations/{organization}".format(organization=organization,)
+
+ @staticmethod
+ def parse_common_organization_path(path: str) -> Dict[str, str]:
+ """Parse a organization path into its component segments."""
+ m = re.match(r"^organizations/(?P.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_project_path(project: str,) -> str:
+ """Return a fully-qualified project string."""
+ return "projects/{project}".format(project=project,)
+
+ @staticmethod
+ def parse_common_project_path(path: str) -> Dict[str, str]:
+ """Parse a project path into its component segments."""
+ m = re.match(r"^projects/(?P.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_location_path(project: str, location: str,) -> str:
+ """Return a fully-qualified location string."""
+ return "projects/{project}/locations/{location}".format(
+ project=project, location=location,
+ )
+
+ @staticmethod
+ def parse_common_location_path(path: str) -> Dict[str, str]:
+ """Parse a location path into its component segments."""
+ m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path)
+ return m.groupdict() if m else {}
+
def __init__(
self,
*,
- credentials: credentials.Credentials = None,
- transport: Union[str, DataTransferServiceTransport] = None,
- client_options: ClientOptions = None,
+ credentials: Optional[credentials.Credentials] = None,
+ transport: Union[str, DataTransferServiceTransport, None] = None,
+ client_options: Optional[client_options_lib.ClientOptions] = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
"""Instantiate the data transfer service client.
@@ -177,8 +276,8 @@ def __init__(
transport (Union[str, ~.DataTransferServiceTransport]): The
transport to use. If set to None, a transport is chosen
automatically.
- client_options (ClientOptions): Custom options for the client. It
- won't take effect if a ``transport`` instance is provided.
+ client_options (client_options_lib.ClientOptions): Custom options for the
+ client. It won't take effect if a ``transport`` instance is provided.
(1) The ``api_endpoint`` property can be used to override the
default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
environment variable can also be used to override the endpoint:
@@ -193,10 +292,10 @@ def __init__(
not provided, the default SSL client certificate will be used if
present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
set, no client certificate will be used.
- client_info (google.api_core.gapic_v1.client_info.ClientInfo):
- The client info used to send a user-agent string along with
- API requests. If ``None``, then default info will be used.
- Generally, you only need to set this if you're developing
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
your own client library.
Raises:
@@ -204,9 +303,9 @@ def __init__(
creation failed for any reason.
"""
if isinstance(client_options, dict):
- client_options = ClientOptions.from_dict(client_options)
+ client_options = client_options_lib.from_dict(client_options)
if client_options is None:
- client_options = ClientOptions.ClientOptions()
+ client_options = client_options_lib.ClientOptions()
# Create SSL credentials for mutual TLS if needed.
use_client_cert = bool(
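The new resource path helpers are plain staticmethods, so they work without constructing (or authenticating) a client; a quick sketch with illustrative values:

```python
from google.cloud.bigquery_datatransfer_v1 import DataTransferServiceClient

# Compose and parse resource names without instantiating a client.
path = DataTransferServiceClient.common_location_path("my-project", "us")
assert path == "projects/my-project/locations/us"
assert DataTransferServiceClient.parse_common_location_path(path) == {
    "project": "my-project",
    "location": "us",
}

run = DataTransferServiceClient.run_path("my-project", "my-config", "my-run")
assert run == "projects/my-project/transferConfigs/my-config/runs/my-run"
```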
diff --git a/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/base.py b/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/base.py
index 8d2f4569..3633ae8e 100644
--- a/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/base.py
+++ b/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/base.py
@@ -115,7 +115,7 @@ def _prep_wrapped_messages(self, client_info):
maximum=60.0,
multiplier=1.3,
predicate=retries.if_exception_type(
- exceptions.ServiceUnavailable, exceptions.DeadlineExceeded,
+ exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
),
),
default_timeout=20.0,
@@ -128,7 +128,7 @@ def _prep_wrapped_messages(self, client_info):
maximum=60.0,
multiplier=1.3,
predicate=retries.if_exception_type(
- exceptions.ServiceUnavailable, exceptions.DeadlineExceeded,
+ exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
),
),
default_timeout=20.0,
@@ -151,7 +151,7 @@ def _prep_wrapped_messages(self, client_info):
maximum=60.0,
multiplier=1.3,
predicate=retries.if_exception_type(
- exceptions.ServiceUnavailable, exceptions.DeadlineExceeded,
+ exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
),
),
default_timeout=20.0,
@@ -164,7 +164,7 @@ def _prep_wrapped_messages(self, client_info):
maximum=60.0,
multiplier=1.3,
predicate=retries.if_exception_type(
- exceptions.ServiceUnavailable, exceptions.DeadlineExceeded,
+ exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
),
),
default_timeout=20.0,
@@ -177,7 +177,7 @@ def _prep_wrapped_messages(self, client_info):
maximum=60.0,
multiplier=1.3,
predicate=retries.if_exception_type(
- exceptions.ServiceUnavailable, exceptions.DeadlineExceeded,
+ exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
),
),
default_timeout=20.0,
@@ -200,7 +200,7 @@ def _prep_wrapped_messages(self, client_info):
maximum=60.0,
multiplier=1.3,
predicate=retries.if_exception_type(
- exceptions.ServiceUnavailable, exceptions.DeadlineExceeded,
+ exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
),
),
default_timeout=20.0,
@@ -213,7 +213,7 @@ def _prep_wrapped_messages(self, client_info):
maximum=60.0,
multiplier=1.3,
predicate=retries.if_exception_type(
- exceptions.ServiceUnavailable, exceptions.DeadlineExceeded,
+ exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
),
),
default_timeout=20.0,
@@ -226,7 +226,7 @@ def _prep_wrapped_messages(self, client_info):
maximum=60.0,
multiplier=1.3,
predicate=retries.if_exception_type(
- exceptions.ServiceUnavailable, exceptions.DeadlineExceeded,
+ exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
),
),
default_timeout=20.0,
@@ -239,7 +239,7 @@ def _prep_wrapped_messages(self, client_info):
maximum=60.0,
multiplier=1.3,
predicate=retries.if_exception_type(
- exceptions.ServiceUnavailable, exceptions.DeadlineExceeded,
+ exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
),
),
default_timeout=20.0,
@@ -252,7 +252,7 @@ def _prep_wrapped_messages(self, client_info):
maximum=60.0,
multiplier=1.3,
predicate=retries.if_exception_type(
- exceptions.ServiceUnavailable, exceptions.DeadlineExceeded,
+ exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
),
),
default_timeout=20.0,
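Reordering the arguments to `if_exception_type` is cosmetic generator churn: the predicate is an isinstance check against the given exception types, so retry behavior is unchanged. A small sketch:

```python
from google.api_core import exceptions
from google.api_core import retry as retries

predicate = retries.if_exception_type(
    exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
)
assert predicate(exceptions.ServiceUnavailable("backend unavailable"))
assert predicate(exceptions.DeadlineExceeded("deadline exceeded"))
assert not predicate(exceptions.NotFound("no such resource"))  # not retried
```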
diff --git a/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/grpc.py b/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/grpc.py
index 8ac75a7c..442cdd27 100644
--- a/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/grpc.py
+++ b/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/grpc.py
@@ -94,10 +94,10 @@ def __init__(
for grpc channel. It is ignored if ``channel`` is provided.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
- client_info (google.api_core.gapic_v1.client_info.ClientInfo):
- The client info used to send a user-agent string along with
- API requests. If ``None``, then default info will be used.
- Generally, you only need to set this if you're developing
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
your own client library.
Raises:
@@ -106,6 +106,8 @@ def __init__(
google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
and ``credentials_file`` are passed.
"""
+ self._ssl_channel_credentials = ssl_channel_credentials
+
if channel:
# Sanity check: Ensure that channel and credentials are not both
# provided.
@@ -113,6 +115,7 @@ def __init__(
# If a channel was explicitly provided, set it.
self._grpc_channel = channel
+ self._ssl_channel_credentials = None
elif api_mtls_endpoint:
warnings.warn(
"api_mtls_endpoint and client_cert_source are deprecated",
@@ -149,6 +152,7 @@ def __init__(
scopes=scopes or self.AUTH_SCOPES,
quota_project_id=quota_project_id,
)
+ self._ssl_channel_credentials = ssl_credentials
else:
host = host if ":" in host else host + ":443"
@@ -226,12 +230,8 @@ def create_channel(
@property
def grpc_channel(self) -> grpc.Channel:
- """Create the channel designed to connect to this service.
-
- This property caches on the instance; repeated calls return
- the same channel.
+ """Return the channel designed to connect to this service.
"""
- # Return the channel from cache.
return self._grpc_channel
@property
diff --git a/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/grpc_asyncio.py b/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/grpc_asyncio.py
index c4220d11..a65ac425 100644
--- a/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/grpc_asyncio.py
+++ b/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/grpc_asyncio.py
@@ -151,6 +151,8 @@ def __init__(
google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
and ``credentials_file`` are passed.
"""
+ self._ssl_channel_credentials = ssl_channel_credentials
+
if channel:
# Sanity check: Ensure that channel and credentials are not both
# provided.
@@ -158,6 +160,7 @@ def __init__(
# If a channel was explicitly provided, set it.
self._grpc_channel = channel
+ self._ssl_channel_credentials = None
elif api_mtls_endpoint:
warnings.warn(
"api_mtls_endpoint and client_cert_source are deprecated",
@@ -194,6 +197,7 @@ def __init__(
scopes=scopes or self.AUTH_SCOPES,
quota_project_id=quota_project_id,
)
+ self._ssl_channel_credentials = ssl_credentials
else:
host = host if ":" in host else host + ":443"
diff --git a/google/cloud/bigquery_datatransfer_v1/types/datatransfer.py b/google/cloud/bigquery_datatransfer_v1/types/datatransfer.py
index 2fab0699..a78d7e41 100644
--- a/google/cloud/bigquery_datatransfer_v1/types/datatransfer.py
+++ b/google/cloud/bigquery_datatransfer_v1/types/datatransfer.py
@@ -70,7 +70,7 @@ class DataSourceParameter(proto.Message):
Parameter display name in the user interface.
description (str):
Parameter description.
- type (~.datatransfer.DataSourceParameter.Type):
+ type_ (~.datatransfer.DataSourceParameter.Type):
Parameter type.
required (bool):
Is parameter required.
@@ -122,7 +122,7 @@ class Type(proto.Enum):
description = proto.Field(proto.STRING, number=3)
- type = proto.Field(proto.ENUM, number=4, enum=Type,)
+ type_ = proto.Field(proto.ENUM, number=4, enum=Type,)
required = proto.Field(proto.BOOL, number=5)
@@ -251,7 +251,7 @@ class DataRefreshType(proto.Enum):
supports_custom_schedule = proto.Field(proto.BOOL, number=11)
parameters = proto.RepeatedField(
- proto.MESSAGE, number=12, message=DataSourceParameter,
+ proto.MESSAGE, number=12, message="DataSourceParameter",
)
help_url = proto.Field(proto.STRING, number=13)
@@ -329,7 +329,7 @@ class ListDataSourcesResponse(proto.Message):
def raw_page(self):
return self
- data_sources = proto.RepeatedField(proto.MESSAGE, number=1, message=DataSource,)
+ data_sources = proto.RepeatedField(proto.MESSAGE, number=1, message="DataSource",)
next_page_token = proto.Field(proto.STRING, number=2)
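Quoting the message class (`message="DataSource"`) makes proto-plus resolve the reference lazily by name, so field declarations no longer depend on class definition order. A standalone sketch of the pattern (message names are hypothetical):

```python
import proto


class Parent(proto.Message):
    # proto-plus resolves the string lazily, so `Child` may be defined
    # after `Parent` in the same module.
    child = proto.Field(proto.MESSAGE, number=1, message="Child")


class Child(proto.Message):
    name = proto.Field(proto.STRING, number=1)


parent = Parent(child=Child(name="example"))
```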
diff --git a/google/cloud/bigquery_datatransfer_v1/types/transfer.py b/google/cloud/bigquery_datatransfer_v1/types/transfer.py
index 86f085e4..e3d0d9ea 100644
--- a/google/cloud/bigquery_datatransfer_v1/types/transfer.py
+++ b/google/cloud/bigquery_datatransfer_v1/types/transfer.py
@@ -192,7 +192,7 @@ class TransferConfig(proto.Message):
schedule = proto.Field(proto.STRING, number=7)
- schedule_options = proto.Field(proto.MESSAGE, number=24, message=ScheduleOptions,)
+ schedule_options = proto.Field(proto.MESSAGE, number=24, message="ScheduleOptions",)
data_refresh_window_days = proto.Field(proto.INT32, number=12)
@@ -210,7 +210,9 @@ class TransferConfig(proto.Message):
notification_pubsub_topic = proto.Field(proto.STRING, number=15)
- email_preferences = proto.Field(proto.MESSAGE, number=18, message=EmailPreferences,)
+ email_preferences = proto.Field(
+ proto.MESSAGE, number=18, message="EmailPreferences",
+ )
class TransferRun(proto.Message):
@@ -299,7 +301,9 @@ class TransferRun(proto.Message):
notification_pubsub_topic = proto.Field(proto.STRING, number=23)
- email_preferences = proto.Field(proto.MESSAGE, number=25, message=EmailPreferences,)
+ email_preferences = proto.Field(
+ proto.MESSAGE, number=25, message="EmailPreferences",
+ )
class TransferMessage(proto.Message):
diff --git a/noxfile.py b/noxfile.py
index ee87f536..a4884a08 100644
--- a/noxfile.py
+++ b/noxfile.py
@@ -28,7 +28,7 @@
DEFAULT_PYTHON_VERSION = "3.8"
SYSTEM_TEST_PYTHON_VERSIONS = ["3.8"]
-UNIT_TEST_PYTHON_VERSIONS = ["3.6", "3.7", "3.8"]
+UNIT_TEST_PYTHON_VERSIONS = ["3.6", "3.7", "3.8", "3.9"]
@nox.session(python=DEFAULT_PYTHON_VERSION)
@@ -72,16 +72,17 @@ def default(session):
# Install all test dependencies, then install this package in-place.
session.install("asyncmock", "pytest-asyncio")
- session.install("mock", "pytest", "pytest-cov")
+ session.install(
+ "mock", "pytest", "pytest-cov",
+ )
session.install("-e", ".")
# Run py.test against the unit tests.
session.run(
"py.test",
"--quiet",
- "--cov=google.cloud.bigquerydatatransfer",
- "--cov=google.cloud",
- "--cov=tests.unit",
+ "--cov=google/cloud",
+ "--cov=tests/unit",
"--cov-append",
"--cov-config=.coveragerc",
"--cov-report=",
diff --git a/samples/AUTHORING_GUIDE.md b/samples/AUTHORING_GUIDE.md
new file mode 100644
index 00000000..55c97b32
--- /dev/null
+++ b/samples/AUTHORING_GUIDE.md
@@ -0,0 +1 @@
+See https://github.com/GoogleCloudPlatform/python-docs-samples/blob/master/AUTHORING_GUIDE.md
\ No newline at end of file
diff --git a/samples/CONTRIBUTING.md b/samples/CONTRIBUTING.md
new file mode 100644
index 00000000..34c882b6
--- /dev/null
+++ b/samples/CONTRIBUTING.md
@@ -0,0 +1 @@
+See https://github.com/GoogleCloudPlatform/python-docs-samples/blob/master/CONTRIBUTING.md
\ No newline at end of file
diff --git a/samples/noxfile.py b/samples/noxfile.py
index ba55d7ce..bca0522e 100644
--- a/samples/noxfile.py
+++ b/samples/noxfile.py
@@ -17,6 +17,7 @@
import os
from pathlib import Path
import sys
+from typing import Callable, Dict, List, Optional
import nox
@@ -39,6 +40,10 @@
# You can opt out from the test for specific Python versions.
'ignored_versions': ["2.7"],
+ # Old samples are opted out of enforcing Python type hints
+ # All new samples should feature them
+ 'enforce_type_hints': False,
+
# An envvar key for determining the project id to use. Change it
# to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a
# build specific Cloud project. You can also use your own string
@@ -64,7 +69,7 @@
TEST_CONFIG.update(TEST_CONFIG_OVERRIDE)
-def get_pytest_env_vars():
+def get_pytest_env_vars() -> Dict[str, str]:
"""Returns a dict for pytest invocation."""
ret = {}
@@ -93,7 +98,7 @@ def get_pytest_env_vars():
#
-def _determine_local_import_names(start_dir):
+def _determine_local_import_names(start_dir: str) -> List[str]:
"""Determines all import names that should be considered "local".
This is used when running the linter to ensure that import order is
@@ -131,8 +136,11 @@ def _determine_local_import_names(start_dir):
@nox.session
-def lint(session):
- session.install("flake8", "flake8-import-order")
+def lint(session: nox.sessions.Session) -> None:
+ if not TEST_CONFIG['enforce_type_hints']:
+ session.install("flake8", "flake8-import-order")
+ else:
+ session.install("flake8", "flake8-import-order", "flake8-annotations")
local_names = _determine_local_import_names(".")
args = FLAKE8_COMMON_ARGS + [
@@ -141,8 +149,18 @@ def lint(session):
"."
]
session.run("flake8", *args)
+#
+# Black
+#
+@nox.session
+def blacken(session: nox.sessions.Session) -> None:
+ session.install("black")
+ python_files = [path for path in os.listdir(".") if path.endswith(".py")]
+
+ session.run("black", *python_files)
+
#
# Sample Tests
#
@@ -151,7 +169,7 @@ def lint(session):
PYTEST_COMMON_ARGS = ["--junitxml=sponge_log.xml"]
-def _session_tests(session, post_install=None):
+def _session_tests(session: nox.sessions.Session, post_install: Callable = None) -> None:
"""Runs py.test for a particular project."""
if os.path.exists("requirements.txt"):
session.install("-r", "requirements.txt")
@@ -177,7 +195,7 @@ def _session_tests(session, post_install=None):
@nox.session(python=ALL_VERSIONS)
-def py(session):
+def py(session: nox.sessions.Session) -> None:
"""Runs py.test for a sample using the specified version of Python."""
if session.python in TESTED_VERSIONS:
_session_tests(session)
@@ -192,7 +210,7 @@ def py(session):
#
-def _get_repo_root():
+def _get_repo_root() -> Optional[str]:
""" Returns the root folder of the project. """
# Get root of this repository. Assume we don't have directories nested deeper than 10 items.
p = Path(os.getcwd())
@@ -201,6 +219,11 @@ def _get_repo_root():
break
if Path(p / ".git").exists():
return str(p)
+ # .git is not available in repos cloned via Cloud Build
+ # setup.py is always in the library's root, so use that instead
+ # https://github.com/googleapis/synthtool/issues/792
+ if Path(p / "setup.py").exists():
+ return str(p)
p = p.parent
raise Exception("Unable to detect repository root.")
@@ -210,7 +233,7 @@ def _get_repo_root():
@nox.session
@nox.parametrize("path", GENERATED_READMES)
-def readmegen(session, path):
+def readmegen(session: nox.sessions.Session, path: str) -> None:
"""(Re-)generates the readme for a sample."""
session.install("jinja2", "pyyaml")
dir_ = os.path.dirname(path)
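Individual samples customize this shared noxfile through a sibling `noxfile_config.py`, the mechanism the changelog entry above links to. A hedged sketch of an override that exercises the new options:

```python
# noxfile_config.py (sketch) -- merged into TEST_CONFIG by
# TEST_CONFIG.update(TEST_CONFIG_OVERRIDE) in the shared noxfile.
TEST_CONFIG_OVERRIDE = {
    "ignored_versions": ["2.7"],
    # Opt this sample into flake8-annotations type-hint enforcement.
    "enforce_type_hints": True,
    # Use the build-specific projects declared in the Kokoro configs above.
    "gcloud_project_env": "BUILD_SPECIFIC_GCLOUD_PROJECT",
}
```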
diff --git a/samples/requirements.txt b/samples/requirements.txt
index 0dbcec5a..1cd31695 100644
--- a/samples/requirements.txt
+++ b/samples/requirements.txt
@@ -1,2 +1,2 @@
-google-cloud-bigquery-datatransfer==1.1.1
+google-cloud-bigquery-datatransfer==2.1.0
google-cloud-bigquery
diff --git a/samples/snippets/noxfile.py b/samples/snippets/noxfile.py
index ba55d7ce..bca0522e 100644
--- a/samples/snippets/noxfile.py
+++ b/samples/snippets/noxfile.py
@@ -17,6 +17,7 @@
import os
from pathlib import Path
import sys
+from typing import Callable, Dict, List, Optional
import nox
@@ -39,6 +40,10 @@
# You can opt out from the test for specific Python versions.
'ignored_versions': ["2.7"],
+ # Old samples are opted out of enforcing Python type hints
+ # All new samples should feature them
+ 'enforce_type_hints': False,
+
# An envvar key for determining the project id to use. Change it
# to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a
# build specific Cloud project. You can also use your own string
@@ -64,7 +69,7 @@
TEST_CONFIG.update(TEST_CONFIG_OVERRIDE)
-def get_pytest_env_vars():
+def get_pytest_env_vars() -> Dict[str, str]:
"""Returns a dict for pytest invocation."""
ret = {}
@@ -93,7 +98,7 @@ def get_pytest_env_vars():
#
-def _determine_local_import_names(start_dir):
+def _determine_local_import_names(start_dir: str) -> List[str]:
"""Determines all import names that should be considered "local".
This is used when running the linter to ensure that import order is
@@ -131,8 +136,11 @@ def _determine_local_import_names(start_dir):
@nox.session
-def lint(session):
- session.install("flake8", "flake8-import-order")
+def lint(session: nox.sessions.Session) -> None:
+ if not TEST_CONFIG['enforce_type_hints']:
+ session.install("flake8", "flake8-import-order")
+ else:
+ session.install("flake8", "flake8-import-order", "flake8-annotations")
local_names = _determine_local_import_names(".")
args = FLAKE8_COMMON_ARGS + [
@@ -141,8 +149,18 @@ def lint(session):
"."
]
session.run("flake8", *args)
+#
+# Black
+#
+@nox.session
+def blacken(session: nox.sessions.Session) -> None:
+ session.install("black")
+ python_files = [path for path in os.listdir(".") if path.endswith(".py")]
+
+ session.run("black", *python_files)
+
#
# Sample Tests
#
@@ -151,7 +169,7 @@ def lint(session):
PYTEST_COMMON_ARGS = ["--junitxml=sponge_log.xml"]
-def _session_tests(session, post_install=None):
+def _session_tests(session: nox.sessions.Session, post_install: Callable = None) -> None:
"""Runs py.test for a particular project."""
if os.path.exists("requirements.txt"):
session.install("-r", "requirements.txt")
@@ -177,7 +195,7 @@ def _session_tests(session, post_install=None):
@nox.session(python=ALL_VERSIONS)
-def py(session):
+def py(session: nox.sessions.Session) -> None:
"""Runs py.test for a sample using the specified version of Python."""
if session.python in TESTED_VERSIONS:
_session_tests(session)
@@ -192,7 +210,7 @@ def py(session):
#
-def _get_repo_root():
+def _get_repo_root() -> Optional[str]:
""" Returns the root folder of the project. """
# Get root of this repository. Assume we don't have directories nested deeper than 10 items.
p = Path(os.getcwd())
@@ -201,6 +219,11 @@ def _get_repo_root():
break
if Path(p / ".git").exists():
return str(p)
+ # .git is not available in repos cloned via Cloud Build
+ # setup.py is always in the library's root, so use that instead
+ # https://github.com/googleapis/synthtool/issues/792
+ if Path(p / "setup.py").exists():
+ return str(p)
p = p.parent
raise Exception("Unable to detect repository root.")
@@ -210,7 +233,7 @@ def _get_repo_root():
@nox.session
@nox.parametrize("path", GENERATED_READMES)
-def readmegen(session, path):
+def readmegen(session: nox.sessions.Session, path: str) -> None:
"""(Re-)generates the readme for a sample."""
session.install("jinja2", "pyyaml")
dir_ = os.path.dirname(path)
diff --git a/samples/snippets/requirements.txt b/samples/snippets/requirements.txt
index af45f439..00c87c7c 100644
--- a/samples/snippets/requirements.txt
+++ b/samples/snippets/requirements.txt
@@ -1 +1 @@
-google-cloud-bigquery-datatransfer==1.1.1
+google-cloud-bigquery-datatransfer==2.1.0
diff --git a/samples/update_transfer_config.py b/samples/update_transfer_config.py
index 11e21ddb..3e6ed1e8 100644
--- a/samples/update_transfer_config.py
+++ b/samples/update_transfer_config.py
@@ -19,7 +19,7 @@
def sample_update_transfer_config(config_name, display_name):
- # [START bigquerydatatransfer_update_transfer_config]
+ # [START bigquerydatatransfer_update_config]
from google.cloud import bigquery_datatransfer
client = bigquery_datatransfer.DataTransferServiceClient()
@@ -37,7 +37,7 @@ def sample_update_transfer_config(config_name, display_name):
)
print("Transfer config updated for '{}'".format(response.name))
- # [END bigquerydatatransfer_update_transfer_config]
+ # [END bigquerydatatransfer_update_config]
# Return the config name for testing purposes, so that it can be deleted.
return response
diff --git a/scripts/fixup_bigquery_datatransfer_v1_keywords.py b/scripts/fixup_bigquery_datatransfer_v1_keywords.py
index a8fbf7c5..c32d8022 100644
--- a/scripts/fixup_bigquery_datatransfer_v1_keywords.py
+++ b/scripts/fixup_bigquery_datatransfer_v1_keywords.py
@@ -1,3 +1,4 @@
+#! /usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
diff --git a/setup.py b/setup.py
index c67feddf..26c8ae7e 100644
--- a/setup.py
+++ b/setup.py
@@ -22,7 +22,7 @@
name = "google-cloud-bigquery-datatransfer"
description = "BigQuery Data Transfer API client library"
-version = "2.1.0"
+version = "3.0.0"
# Should be one of:
# 'Development Status :: 3 - Alpha'
# 'Development Status :: 4 - Beta'
diff --git a/synth.metadata b/synth.metadata
index 20b58a93..b02e1f7f 100644
--- a/synth.metadata
+++ b/synth.metadata
@@ -3,22 +3,30 @@
{
"git": {
"name": ".",
- "remote": "git@github.com:plamut/python-bigquery-datatransfer.git",
- "sha": "41256eec1994fbff48894c7055e6440b4e636628"
+ "remote": "https://github.com/googleapis/python-bigquery-datatransfer.git",
+ "sha": "3fb982cc0d4df052495b267f2a7bd3e1c3ea1683"
+ }
+ },
+ {
+ "git": {
+ "name": "googleapis",
+ "remote": "https://github.com/googleapis/googleapis.git",
+ "sha": "3f87da2ed1ddc3566ef0810c4fc06a2682cc9f5f",
+ "internalRef": "343022252"
}
},
{
"git": {
"name": "synthtool",
"remote": "https://github.com/googleapis/synthtool.git",
- "sha": "da29da32b3a988457b49ae290112b74f14b713cc"
+ "sha": "18c5dbdb4ac8cf75d4d8174e7b4558f48e76f8a1"
}
},
{
"git": {
"name": "synthtool",
"remote": "https://github.com/googleapis/synthtool.git",
- "sha": "da29da32b3a988457b49ae290112b74f14b713cc"
+ "sha": "18c5dbdb4ac8cf75d4d8174e7b4558f48e76f8a1"
}
}
],
@@ -32,5 +40,99 @@
"generator": "bazel"
}
}
+ ],
+ "generatedFiles": [
+ ".flake8",
+ ".github/CONTRIBUTING.md",
+ ".github/ISSUE_TEMPLATE/bug_report.md",
+ ".github/ISSUE_TEMPLATE/feature_request.md",
+ ".github/ISSUE_TEMPLATE/support_request.md",
+ ".github/PULL_REQUEST_TEMPLATE.md",
+ ".github/release-please.yml",
+ ".github/snippet-bot.yml",
+ ".gitignore",
+ ".kokoro/build.sh",
+ ".kokoro/continuous/common.cfg",
+ ".kokoro/continuous/continuous.cfg",
+ ".kokoro/docker/docs/Dockerfile",
+ ".kokoro/docker/docs/fetch_gpg_keys.sh",
+ ".kokoro/docs/common.cfg",
+ ".kokoro/docs/docs-presubmit.cfg",
+ ".kokoro/docs/docs.cfg",
+ ".kokoro/populate-secrets.sh",
+ ".kokoro/presubmit/common.cfg",
+ ".kokoro/presubmit/presubmit.cfg",
+ ".kokoro/publish-docs.sh",
+ ".kokoro/release.sh",
+ ".kokoro/release/common.cfg",
+ ".kokoro/release/release.cfg",
+ ".kokoro/samples/lint/common.cfg",
+ ".kokoro/samples/lint/continuous.cfg",
+ ".kokoro/samples/lint/periodic.cfg",
+ ".kokoro/samples/lint/presubmit.cfg",
+ ".kokoro/samples/python3.6/common.cfg",
+ ".kokoro/samples/python3.6/continuous.cfg",
+ ".kokoro/samples/python3.6/periodic.cfg",
+ ".kokoro/samples/python3.6/presubmit.cfg",
+ ".kokoro/samples/python3.7/common.cfg",
+ ".kokoro/samples/python3.7/continuous.cfg",
+ ".kokoro/samples/python3.7/periodic.cfg",
+ ".kokoro/samples/python3.7/presubmit.cfg",
+ ".kokoro/samples/python3.8/common.cfg",
+ ".kokoro/samples/python3.8/continuous.cfg",
+ ".kokoro/samples/python3.8/periodic.cfg",
+ ".kokoro/samples/python3.8/presubmit.cfg",
+ ".kokoro/test-samples.sh",
+ ".kokoro/trampoline.sh",
+ ".kokoro/trampoline_v2.sh",
+ ".pre-commit-config.yaml",
+ ".trampolinerc",
+ "CODE_OF_CONDUCT.md",
+ "CONTRIBUTING.rst",
+ "LICENSE",
+ "MANIFEST.in",
+ "docs/_static/custom.css",
+ "docs/_templates/layout.html",
+ "docs/bigquery_datatransfer_v1/services.rst",
+ "docs/bigquery_datatransfer_v1/types.rst",
+ "docs/conf.py",
+ "docs/multiprocessing.rst",
+ "google/cloud/bigquery_datatransfer/__init__.py",
+ "google/cloud/bigquery_datatransfer/py.typed",
+ "google/cloud/bigquery_datatransfer_v1/__init__.py",
+ "google/cloud/bigquery_datatransfer_v1/proto/datatransfer.proto",
+ "google/cloud/bigquery_datatransfer_v1/proto/transfer.proto",
+ "google/cloud/bigquery_datatransfer_v1/py.typed",
+ "google/cloud/bigquery_datatransfer_v1/services/__init__.py",
+ "google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/__init__.py",
+ "google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/async_client.py",
+ "google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/client.py",
+ "google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/pagers.py",
+ "google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/__init__.py",
+ "google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/base.py",
+ "google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/grpc.py",
+ "google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/grpc_asyncio.py",
+ "google/cloud/bigquery_datatransfer_v1/types/__init__.py",
+ "google/cloud/bigquery_datatransfer_v1/types/datatransfer.py",
+ "google/cloud/bigquery_datatransfer_v1/types/transfer.py",
+ "mypy.ini",
+ "noxfile.py",
+ "renovate.json",
+ "samples/AUTHORING_GUIDE.md",
+ "samples/CONTRIBUTING.md",
+ "samples/noxfile.py",
+ "samples/snippets/noxfile.py",
+ "scripts/decrypt-secrets.sh",
+ "scripts/fixup_bigquery_datatransfer_v1_keywords.py",
+ "scripts/readme-gen/readme_gen.py",
+ "scripts/readme-gen/templates/README.tmpl.rst",
+ "scripts/readme-gen/templates/auth.tmpl.rst",
+ "scripts/readme-gen/templates/auth_api_key.tmpl.rst",
+ "scripts/readme-gen/templates/install_deps.tmpl.rst",
+ "scripts/readme-gen/templates/install_portaudio.tmpl.rst",
+ "setup.cfg",
+ "testing/.gitignore",
+ "tests/unit/gapic/bigquery_datatransfer_v1/__init__.py",
+ "tests/unit/gapic/bigquery_datatransfer_v1/test_data_transfer_service.py"
]
}
\ No newline at end of file
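
The synth.metadata hunk above records the synthtool commit used for regeneration and adds a generatedFiles manifest listing every file the generator owns. A minimal sketch of how such a manifest could be consumed, assuming a synth.metadata file in the working directory; load_generated_files and prune_stale are illustrative helpers, not synthtool's actual API:

    import json
    import pathlib

    def load_generated_files(metadata_path="synth.metadata"):
        # Read the manifest of files the generator owns.
        with open(metadata_path) as f:
            return json.load(f).get("generatedFiles", [])

    def prune_stale(previous, current):
        # Files in the old manifest but absent from the new one are no
        # longer generated and can be removed from the checkout.
        for stale in sorted(set(previous) - set(current)):
            path = pathlib.Path(stale)
            if path.is_file():
                path.unlink()
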
diff --git a/tests/unit/gapic/bigquery_datatransfer_v1/test_data_transfer_service.py b/tests/unit/gapic/bigquery_datatransfer_v1/test_data_transfer_service.py
index 63e74a84..1596cfad 100644
--- a/tests/unit/gapic/bigquery_datatransfer_v1/test_data_transfer_service.py
+++ b/tests/unit/gapic/bigquery_datatransfer_v1/test_data_transfer_service.py
@@ -107,12 +107,12 @@ def test_data_transfer_service_client_from_service_account_file(client_class):
) as factory:
factory.return_value = creds
client = client_class.from_service_account_file("dummy/file/path.json")
- assert client._transport._credentials == creds
+ assert client.transport._credentials == creds
client = client_class.from_service_account_json("dummy/file/path.json")
- assert client._transport._credentials == creds
+ assert client.transport._credentials == creds
- assert client._transport._host == "bigquerydatatransfer.googleapis.com:443"
+ assert client.transport._host == "bigquerydatatransfer.googleapis.com:443"
def test_data_transfer_service_client_get_transport_class():
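
The recurring change from client._transport to client.transport throughout this file tracks a generator update that exposes the transport through a public property on both the sync and async clients, so tests no longer reach through the private client._client._transport chain. A minimal sketch of the shape these tests assume; _FakeCall, _Transport, and Client are simplified stand-ins, not the generated classes:

    from unittest import mock

    class _FakeCall:
        # Stand-in for a gRPC multicallable: a callable instance, so
        # tests can patch __call__ on its type.
        def __call__(self, request):
            raise NotImplementedError

    class _Transport:
        def __init__(self):
            self.get_data_source = _FakeCall()

    class Client:
        def __init__(self):
            self._transport = _Transport()

        @property
        def transport(self):
            # Public accessor; the same attribute path now works for
            # the async client, which wraps a sync client internally.
            return self._transport

    client = Client()
    with mock.patch.object(type(client.transport.get_data_source), "__call__") as call:
        call.return_value = "response"
        assert client.transport.get_data_source(None) == "response"
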
@@ -484,7 +484,7 @@ def test_get_data_source(
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.get_data_source), "__call__") as call:
+ with mock.patch.object(type(client.transport.get_data_source), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = datatransfer.DataSource(
name="name_value",
@@ -514,6 +514,7 @@ def test_get_data_source(
assert args[0] == datatransfer.GetDataSourceRequest()
# Establish that the response is the type that we expect.
+
assert isinstance(response, datatransfer.DataSource)
assert response.name == "name_value"
@@ -560,19 +561,19 @@ def test_get_data_source_from_dict():
@pytest.mark.asyncio
-async def test_get_data_source_async(transport: str = "grpc_asyncio"):
+async def test_get_data_source_async(
+ transport: str = "grpc_asyncio", request_type=datatransfer.GetDataSourceRequest
+):
client = DataTransferServiceAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = datatransfer.GetDataSourceRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.get_data_source), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.get_data_source), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
datatransfer.DataSource(
@@ -601,7 +602,7 @@ async def test_get_data_source_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == datatransfer.GetDataSourceRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, datatransfer.DataSource)
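
With the test parametrized over request_type, the captured argument can no longer be compared to the local request variable: when the test runs with dict, the client has already coerced it into a proto message. Comparing against a freshly constructed empty request works in both cases because proto-plus messages built from an empty dict and from no arguments compare equal. A small illustration, assuming proto-plus is installed; GetThingRequest is a made-up message, not one of the generated types:

    import proto

    class GetThingRequest(proto.Message):
        name = proto.Field(proto.STRING, number=1)

    # A dict is coerced into the message type, so these compare equal.
    assert GetThingRequest({}) == GetThingRequest()
    assert GetThingRequest({"name": "n"}) == GetThingRequest(name="n")
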
@@ -645,6 +646,11 @@ async def test_get_data_source_async(transport: str = "grpc_asyncio"):
assert response.manual_runs_disabled is True
+@pytest.mark.asyncio
+async def test_get_data_source_async_from_dict():
+ await test_get_data_source_async(request_type=dict)
+
+
def test_get_data_source_field_headers():
client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),)
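
Each async test is now parametrized over the request class, and a thin *_async_from_dict wrapper like the one added above re-runs the full body with dict, so every RPC gets plain-dict coverage without duplicating assertions. A self-contained sketch of that shape; FakeRequest and fake_rpc are stand-ins for the generated message and the mocked stub:

    import asyncio

    class FakeRequest:
        # Stand-in for a generated proto request class.
        def __init__(self, mapping=None):
            self.fields = dict(mapping or {})

    async def fake_rpc(request):
        # The real tests mock the gRPC stub; here we just report the
        # type the caller constructed.
        return type(request).__name__

    async def exercise_rpc(request_type=FakeRequest):
        # Body shared by both variants: build an empty request and call.
        request = request_type()
        return await fake_rpc(request)

    async def exercise_rpc_from_dict():
        # Thin wrapper mirroring the generated *_async_from_dict tests.
        return await exercise_rpc(request_type=dict)

    assert asyncio.run(exercise_rpc()) == "FakeRequest"
    assert asyncio.run(exercise_rpc_from_dict()) == "dict"
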
@@ -654,7 +660,7 @@ def test_get_data_source_field_headers():
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.get_data_source), "__call__") as call:
+ with mock.patch.object(type(client.transport.get_data_source), "__call__") as call:
call.return_value = datatransfer.DataSource()
client.get_data_source(request)
@@ -681,9 +687,7 @@ async def test_get_data_source_field_headers_async():
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.get_data_source), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.get_data_source), "__call__") as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
datatransfer.DataSource()
)
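
The *_field_headers tests in this stretch set a resource field on the request (request.name = "name/value") and verify that the client attaches a routing header, conventionally x-goog-request-params, in the form these tests assert ("name=name/value"). A hedged sketch of building such metadata; routing_metadata is an illustrative helper, not the gapic routing-header module:

    from urllib.parse import quote

    def routing_metadata(**fields):
        # Build the gRPC metadata entry the tests look for; '/' is left
        # unescaped, matching the expected "name=name/value" form.
        params = "&".join(
            f"{key}={quote(str(value), safe='/')}"
            for key, value in sorted(fields.items())
        )
        return [("x-goog-request-params", params)]

    assert routing_metadata(name="name/value") == [
        ("x-goog-request-params", "name=name/value")
    ]
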
@@ -704,7 +708,7 @@ def test_get_data_source_flattened():
client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.get_data_source), "__call__") as call:
+ with mock.patch.object(type(client.transport.get_data_source), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = datatransfer.DataSource()
@@ -738,9 +742,7 @@ async def test_get_data_source_flattened_async():
)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.get_data_source), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.get_data_source), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = datatransfer.DataSource()
@@ -786,7 +788,7 @@ def test_list_data_sources(
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.list_data_sources), "__call__"
+ type(client.transport.list_data_sources), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = datatransfer.ListDataSourcesResponse(
@@ -802,6 +804,7 @@ def test_list_data_sources(
assert args[0] == datatransfer.ListDataSourcesRequest()
# Establish that the response is the type that we expect.
+
assert isinstance(response, pagers.ListDataSourcesPager)
assert response.next_page_token == "next_page_token_value"
@@ -812,18 +815,20 @@ def test_list_data_sources_from_dict():
@pytest.mark.asyncio
-async def test_list_data_sources_async(transport: str = "grpc_asyncio"):
+async def test_list_data_sources_async(
+ transport: str = "grpc_asyncio", request_type=datatransfer.ListDataSourcesRequest
+):
client = DataTransferServiceAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = datatransfer.ListDataSourcesRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.list_data_sources), "__call__"
+ type(client.transport.list_data_sources), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
@@ -838,7 +843,7 @@ async def test_list_data_sources_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == datatransfer.ListDataSourcesRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, pagers.ListDataSourcesAsyncPager)
@@ -846,6 +851,11 @@ async def test_list_data_sources_async(transport: str = "grpc_asyncio"):
assert response.next_page_token == "next_page_token_value"
+@pytest.mark.asyncio
+async def test_list_data_sources_async_from_dict():
+ await test_list_data_sources_async(request_type=dict)
+
+
def test_list_data_sources_field_headers():
client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),)
@@ -856,7 +866,7 @@ def test_list_data_sources_field_headers():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.list_data_sources), "__call__"
+ type(client.transport.list_data_sources), "__call__"
) as call:
call.return_value = datatransfer.ListDataSourcesResponse()
@@ -885,7 +895,7 @@ async def test_list_data_sources_field_headers_async():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.list_data_sources), "__call__"
+ type(client.transport.list_data_sources), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
datatransfer.ListDataSourcesResponse()
@@ -908,7 +918,7 @@ def test_list_data_sources_flattened():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.list_data_sources), "__call__"
+ type(client.transport.list_data_sources), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = datatransfer.ListDataSourcesResponse()
@@ -944,7 +954,7 @@ async def test_list_data_sources_flattened_async():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.list_data_sources), "__call__"
+ type(client.transport.list_data_sources), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = datatransfer.ListDataSourcesResponse()
@@ -983,7 +993,7 @@ def test_list_data_sources_pager():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.list_data_sources), "__call__"
+ type(client.transport.list_data_sources), "__call__"
) as call:
# Set the response to a series of pages.
call.side_effect = (
@@ -1025,7 +1035,7 @@ def test_list_data_sources_pages():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.list_data_sources), "__call__"
+ type(client.transport.list_data_sources), "__call__"
) as call:
# Set the response to a series of pages.
call.side_effect = (
@@ -1061,7 +1071,7 @@ async def test_list_data_sources_async_pager():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.list_data_sources),
+ type(client.transport.list_data_sources),
"__call__",
new_callable=mock.AsyncMock,
) as call:
@@ -1104,7 +1114,7 @@ async def test_list_data_sources_async_pages():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.list_data_sources),
+ type(client.transport.list_data_sources),
"__call__",
new_callable=mock.AsyncMock,
) as call:
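
The pager tests here feed the mocked stub a tuple via call.side_effect, so each invocation returns the next page, and the pager follows next_page_token until it is empty. A simplified, self-contained version of that flow; Page and list_items stand in for the generated response type and pager:

    from unittest import mock

    class Page:
        def __init__(self, items, next_page_token=""):
            self.items = items
            self.next_page_token = next_page_token

    def list_items(stub):
        # Collect items across pages, following next_page_token until
        # the service returns an empty token.
        results, token = [], None
        while token != "":
            page = stub(page_token=token or "")
            results.extend(page.items)
            token = page.next_page_token
        return results

    stub = mock.Mock(side_effect=(Page([1, 2], "t1"), Page([3], "")))
    assert list_items(stub) == [1, 2, 3]
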
@@ -1149,7 +1159,7 @@ def test_create_transfer_config(
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.create_transfer_config), "__call__"
+ type(client.transport.create_transfer_config), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = transfer.TransferConfig(
@@ -1175,6 +1185,7 @@ def test_create_transfer_config(
assert args[0] == datatransfer.CreateTransferConfigRequest()
# Establish that the response is the type that we expect.
+
assert isinstance(response, transfer.TransferConfig)
assert response.name == "name_value"
@@ -1203,18 +1214,21 @@ def test_create_transfer_config_from_dict():
@pytest.mark.asyncio
-async def test_create_transfer_config_async(transport: str = "grpc_asyncio"):
+async def test_create_transfer_config_async(
+ transport: str = "grpc_asyncio",
+ request_type=datatransfer.CreateTransferConfigRequest,
+):
client = DataTransferServiceAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = datatransfer.CreateTransferConfigRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.create_transfer_config), "__call__"
+ type(client.transport.create_transfer_config), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
@@ -1238,7 +1252,7 @@ async def test_create_transfer_config_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == datatransfer.CreateTransferConfigRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, transfer.TransferConfig)
@@ -1264,6 +1278,11 @@ async def test_create_transfer_config_async(transport: str = "grpc_asyncio"):
assert response.notification_pubsub_topic == "notification_pubsub_topic_value"
+@pytest.mark.asyncio
+async def test_create_transfer_config_async_from_dict():
+ await test_create_transfer_config_async(request_type=dict)
+
+
def test_create_transfer_config_field_headers():
client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),)
@@ -1274,7 +1293,7 @@ def test_create_transfer_config_field_headers():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.create_transfer_config), "__call__"
+ type(client.transport.create_transfer_config), "__call__"
) as call:
call.return_value = transfer.TransferConfig()
@@ -1303,7 +1322,7 @@ async def test_create_transfer_config_field_headers_async():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.create_transfer_config), "__call__"
+ type(client.transport.create_transfer_config), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
transfer.TransferConfig()
@@ -1326,7 +1345,7 @@ def test_create_transfer_config_flattened():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.create_transfer_config), "__call__"
+ type(client.transport.create_transfer_config), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = transfer.TransferConfig()
@@ -1369,7 +1388,7 @@ async def test_create_transfer_config_flattened_async():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.create_transfer_config), "__call__"
+ type(client.transport.create_transfer_config), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = transfer.TransferConfig()
@@ -1423,7 +1442,7 @@ def test_update_transfer_config(
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.update_transfer_config), "__call__"
+ type(client.transport.update_transfer_config), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = transfer.TransferConfig(
@@ -1449,6 +1468,7 @@ def test_update_transfer_config(
assert args[0] == datatransfer.UpdateTransferConfigRequest()
# Establish that the response is the type that we expect.
+
assert isinstance(response, transfer.TransferConfig)
assert response.name == "name_value"
@@ -1477,18 +1497,21 @@ def test_update_transfer_config_from_dict():
@pytest.mark.asyncio
-async def test_update_transfer_config_async(transport: str = "grpc_asyncio"):
+async def test_update_transfer_config_async(
+ transport: str = "grpc_asyncio",
+ request_type=datatransfer.UpdateTransferConfigRequest,
+):
client = DataTransferServiceAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = datatransfer.UpdateTransferConfigRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.update_transfer_config), "__call__"
+ type(client.transport.update_transfer_config), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
@@ -1512,7 +1535,7 @@ async def test_update_transfer_config_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == datatransfer.UpdateTransferConfigRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, transfer.TransferConfig)
@@ -1538,6 +1561,11 @@ async def test_update_transfer_config_async(transport: str = "grpc_asyncio"):
assert response.notification_pubsub_topic == "notification_pubsub_topic_value"
+@pytest.mark.asyncio
+async def test_update_transfer_config_async_from_dict():
+ await test_update_transfer_config_async(request_type=dict)
+
+
def test_update_transfer_config_field_headers():
client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),)
@@ -1548,7 +1576,7 @@ def test_update_transfer_config_field_headers():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.update_transfer_config), "__call__"
+ type(client.transport.update_transfer_config), "__call__"
) as call:
call.return_value = transfer.TransferConfig()
@@ -1580,7 +1608,7 @@ async def test_update_transfer_config_field_headers_async():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.update_transfer_config), "__call__"
+ type(client.transport.update_transfer_config), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
transfer.TransferConfig()
@@ -1606,7 +1634,7 @@ def test_update_transfer_config_flattened():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.update_transfer_config), "__call__"
+ type(client.transport.update_transfer_config), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = transfer.TransferConfig()
@@ -1649,7 +1677,7 @@ async def test_update_transfer_config_flattened_async():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.update_transfer_config), "__call__"
+ type(client.transport.update_transfer_config), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = transfer.TransferConfig()
@@ -1703,7 +1731,7 @@ def test_delete_transfer_config(
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.delete_transfer_config), "__call__"
+ type(client.transport.delete_transfer_config), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = None
@@ -1725,18 +1753,21 @@ def test_delete_transfer_config_from_dict():
@pytest.mark.asyncio
-async def test_delete_transfer_config_async(transport: str = "grpc_asyncio"):
+async def test_delete_transfer_config_async(
+ transport: str = "grpc_asyncio",
+ request_type=datatransfer.DeleteTransferConfigRequest,
+):
client = DataTransferServiceAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = datatransfer.DeleteTransferConfigRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.delete_transfer_config), "__call__"
+ type(client.transport.delete_transfer_config), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
@@ -1747,12 +1778,17 @@ async def test_delete_transfer_config_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == datatransfer.DeleteTransferConfigRequest()
# Establish that the response is the type that we expect.
assert response is None
+@pytest.mark.asyncio
+async def test_delete_transfer_config_async_from_dict():
+ await test_delete_transfer_config_async(request_type=dict)
+
+
def test_delete_transfer_config_field_headers():
client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),)
@@ -1763,7 +1799,7 @@ def test_delete_transfer_config_field_headers():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.delete_transfer_config), "__call__"
+ type(client.transport.delete_transfer_config), "__call__"
) as call:
call.return_value = None
@@ -1792,7 +1828,7 @@ async def test_delete_transfer_config_field_headers_async():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.delete_transfer_config), "__call__"
+ type(client.transport.delete_transfer_config), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
@@ -1813,7 +1849,7 @@ def test_delete_transfer_config_flattened():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.delete_transfer_config), "__call__"
+ type(client.transport.delete_transfer_config), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = None
@@ -1849,7 +1885,7 @@ async def test_delete_transfer_config_flattened_async():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.delete_transfer_config), "__call__"
+ type(client.transport.delete_transfer_config), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = None
@@ -1894,7 +1930,7 @@ def test_get_transfer_config(
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.get_transfer_config), "__call__"
+ type(client.transport.get_transfer_config), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = transfer.TransferConfig(
@@ -1920,6 +1956,7 @@ def test_get_transfer_config(
assert args[0] == datatransfer.GetTransferConfigRequest()
# Establish that the response is the type that we expect.
+
assert isinstance(response, transfer.TransferConfig)
assert response.name == "name_value"
@@ -1948,18 +1985,20 @@ def test_get_transfer_config_from_dict():
@pytest.mark.asyncio
-async def test_get_transfer_config_async(transport: str = "grpc_asyncio"):
+async def test_get_transfer_config_async(
+ transport: str = "grpc_asyncio", request_type=datatransfer.GetTransferConfigRequest
+):
client = DataTransferServiceAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = datatransfer.GetTransferConfigRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.get_transfer_config), "__call__"
+ type(client.transport.get_transfer_config), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
@@ -1983,7 +2022,7 @@ async def test_get_transfer_config_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == datatransfer.GetTransferConfigRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, transfer.TransferConfig)
@@ -2009,6 +2048,11 @@ async def test_get_transfer_config_async(transport: str = "grpc_asyncio"):
assert response.notification_pubsub_topic == "notification_pubsub_topic_value"
+@pytest.mark.asyncio
+async def test_get_transfer_config_async_from_dict():
+ await test_get_transfer_config_async(request_type=dict)
+
+
def test_get_transfer_config_field_headers():
client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),)
@@ -2019,7 +2063,7 @@ def test_get_transfer_config_field_headers():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.get_transfer_config), "__call__"
+ type(client.transport.get_transfer_config), "__call__"
) as call:
call.return_value = transfer.TransferConfig()
@@ -2048,7 +2092,7 @@ async def test_get_transfer_config_field_headers_async():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.get_transfer_config), "__call__"
+ type(client.transport.get_transfer_config), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
transfer.TransferConfig()
@@ -2071,7 +2115,7 @@ def test_get_transfer_config_flattened():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.get_transfer_config), "__call__"
+ type(client.transport.get_transfer_config), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = transfer.TransferConfig()
@@ -2107,7 +2151,7 @@ async def test_get_transfer_config_flattened_async():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.get_transfer_config), "__call__"
+ type(client.transport.get_transfer_config), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = transfer.TransferConfig()
@@ -2154,7 +2198,7 @@ def test_list_transfer_configs(
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.list_transfer_configs), "__call__"
+ type(client.transport.list_transfer_configs), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = datatransfer.ListTransferConfigsResponse(
@@ -2170,6 +2214,7 @@ def test_list_transfer_configs(
assert args[0] == datatransfer.ListTransferConfigsRequest()
# Establish that the response is the type that we expect.
+
assert isinstance(response, pagers.ListTransferConfigsPager)
assert response.next_page_token == "next_page_token_value"
@@ -2180,18 +2225,21 @@ def test_list_transfer_configs_from_dict():
@pytest.mark.asyncio
-async def test_list_transfer_configs_async(transport: str = "grpc_asyncio"):
+async def test_list_transfer_configs_async(
+ transport: str = "grpc_asyncio",
+ request_type=datatransfer.ListTransferConfigsRequest,
+):
client = DataTransferServiceAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = datatransfer.ListTransferConfigsRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.list_transfer_configs), "__call__"
+ type(client.transport.list_transfer_configs), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
@@ -2206,7 +2254,7 @@ async def test_list_transfer_configs_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == datatransfer.ListTransferConfigsRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, pagers.ListTransferConfigsAsyncPager)
@@ -2214,6 +2262,11 @@ async def test_list_transfer_configs_async(transport: str = "grpc_asyncio"):
assert response.next_page_token == "next_page_token_value"
+@pytest.mark.asyncio
+async def test_list_transfer_configs_async_from_dict():
+ await test_list_transfer_configs_async(request_type=dict)
+
+
def test_list_transfer_configs_field_headers():
client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),)
@@ -2224,7 +2277,7 @@ def test_list_transfer_configs_field_headers():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.list_transfer_configs), "__call__"
+ type(client.transport.list_transfer_configs), "__call__"
) as call:
call.return_value = datatransfer.ListTransferConfigsResponse()
@@ -2253,7 +2306,7 @@ async def test_list_transfer_configs_field_headers_async():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.list_transfer_configs), "__call__"
+ type(client.transport.list_transfer_configs), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
datatransfer.ListTransferConfigsResponse()
@@ -2276,7 +2329,7 @@ def test_list_transfer_configs_flattened():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.list_transfer_configs), "__call__"
+ type(client.transport.list_transfer_configs), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = datatransfer.ListTransferConfigsResponse()
@@ -2312,7 +2365,7 @@ async def test_list_transfer_configs_flattened_async():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.list_transfer_configs), "__call__"
+ type(client.transport.list_transfer_configs), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = datatransfer.ListTransferConfigsResponse()
@@ -2351,7 +2404,7 @@ def test_list_transfer_configs_pager():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.list_transfer_configs), "__call__"
+ type(client.transport.list_transfer_configs), "__call__"
) as call:
# Set the response to a series of pages.
call.side_effect = (
@@ -2396,7 +2449,7 @@ def test_list_transfer_configs_pages():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.list_transfer_configs), "__call__"
+ type(client.transport.list_transfer_configs), "__call__"
) as call:
# Set the response to a series of pages.
call.side_effect = (
@@ -2435,7 +2488,7 @@ async def test_list_transfer_configs_async_pager():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.list_transfer_configs),
+ type(client.transport.list_transfer_configs),
"__call__",
new_callable=mock.AsyncMock,
) as call:
@@ -2481,7 +2534,7 @@ async def test_list_transfer_configs_async_pages():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.list_transfer_configs),
+ type(client.transport.list_transfer_configs),
"__call__",
new_callable=mock.AsyncMock,
) as call:
@@ -2529,7 +2582,7 @@ def test_schedule_transfer_runs(
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.schedule_transfer_runs), "__call__"
+ type(client.transport.schedule_transfer_runs), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = datatransfer.ScheduleTransferRunsResponse()
@@ -2543,6 +2596,7 @@ def test_schedule_transfer_runs(
assert args[0] == datatransfer.ScheduleTransferRunsRequest()
# Establish that the response is the type that we expect.
+
assert isinstance(response, datatransfer.ScheduleTransferRunsResponse)
@@ -2551,18 +2605,21 @@ def test_schedule_transfer_runs_from_dict():
@pytest.mark.asyncio
-async def test_schedule_transfer_runs_async(transport: str = "grpc_asyncio"):
+async def test_schedule_transfer_runs_async(
+ transport: str = "grpc_asyncio",
+ request_type=datatransfer.ScheduleTransferRunsRequest,
+):
client = DataTransferServiceAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = datatransfer.ScheduleTransferRunsRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.schedule_transfer_runs), "__call__"
+ type(client.transport.schedule_transfer_runs), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
@@ -2575,12 +2632,17 @@ async def test_schedule_transfer_runs_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == datatransfer.ScheduleTransferRunsRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, datatransfer.ScheduleTransferRunsResponse)
+@pytest.mark.asyncio
+async def test_schedule_transfer_runs_async_from_dict():
+ await test_schedule_transfer_runs_async(request_type=dict)
+
+
def test_schedule_transfer_runs_field_headers():
client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),)
@@ -2591,7 +2653,7 @@ def test_schedule_transfer_runs_field_headers():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.schedule_transfer_runs), "__call__"
+ type(client.transport.schedule_transfer_runs), "__call__"
) as call:
call.return_value = datatransfer.ScheduleTransferRunsResponse()
@@ -2620,7 +2682,7 @@ async def test_schedule_transfer_runs_field_headers_async():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.schedule_transfer_runs), "__call__"
+ type(client.transport.schedule_transfer_runs), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
datatransfer.ScheduleTransferRunsResponse()
@@ -2643,7 +2705,7 @@ def test_schedule_transfer_runs_flattened():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.schedule_transfer_runs), "__call__"
+ type(client.transport.schedule_transfer_runs), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = datatransfer.ScheduleTransferRunsResponse()
@@ -2694,7 +2756,7 @@ async def test_schedule_transfer_runs_flattened_async():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.schedule_transfer_runs), "__call__"
+ type(client.transport.schedule_transfer_runs), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = datatransfer.ScheduleTransferRunsResponse()
@@ -2756,7 +2818,7 @@ def test_start_manual_transfer_runs(
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.start_manual_transfer_runs), "__call__"
+ type(client.transport.start_manual_transfer_runs), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = datatransfer.StartManualTransferRunsResponse()
@@ -2770,6 +2832,7 @@ def test_start_manual_transfer_runs(
assert args[0] == datatransfer.StartManualTransferRunsRequest()
# Establish that the response is the type that we expect.
+
assert isinstance(response, datatransfer.StartManualTransferRunsResponse)
@@ -2778,18 +2841,21 @@ def test_start_manual_transfer_runs_from_dict():
@pytest.mark.asyncio
-async def test_start_manual_transfer_runs_async(transport: str = "grpc_asyncio"):
+async def test_start_manual_transfer_runs_async(
+ transport: str = "grpc_asyncio",
+ request_type=datatransfer.StartManualTransferRunsRequest,
+):
client = DataTransferServiceAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = datatransfer.StartManualTransferRunsRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.start_manual_transfer_runs), "__call__"
+ type(client.transport.start_manual_transfer_runs), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
@@ -2802,12 +2868,17 @@ async def test_start_manual_transfer_runs_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == datatransfer.StartManualTransferRunsRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, datatransfer.StartManualTransferRunsResponse)
+@pytest.mark.asyncio
+async def test_start_manual_transfer_runs_async_from_dict():
+ await test_start_manual_transfer_runs_async(request_type=dict)
+
+
def test_start_manual_transfer_runs_field_headers():
client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),)
@@ -2818,7 +2889,7 @@ def test_start_manual_transfer_runs_field_headers():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.start_manual_transfer_runs), "__call__"
+ type(client.transport.start_manual_transfer_runs), "__call__"
) as call:
call.return_value = datatransfer.StartManualTransferRunsResponse()
@@ -2847,7 +2918,7 @@ async def test_start_manual_transfer_runs_field_headers_async():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.start_manual_transfer_runs), "__call__"
+ type(client.transport.start_manual_transfer_runs), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
datatransfer.StartManualTransferRunsResponse()
@@ -2877,9 +2948,7 @@ def test_get_transfer_run(
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._transport.get_transfer_run), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.get_transfer_run), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = transfer.TransferRun(
name="name_value",
@@ -2900,6 +2969,7 @@ def test_get_transfer_run(
assert args[0] == datatransfer.GetTransferRunRequest()
# Establish that the response is the type that we expect.
+
assert isinstance(response, transfer.TransferRun)
assert response.name == "name_value"
@@ -2920,19 +2990,19 @@ def test_get_transfer_run_from_dict():
@pytest.mark.asyncio
-async def test_get_transfer_run_async(transport: str = "grpc_asyncio"):
+async def test_get_transfer_run_async(
+ transport: str = "grpc_asyncio", request_type=datatransfer.GetTransferRunRequest
+):
client = DataTransferServiceAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = datatransfer.GetTransferRunRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.get_transfer_run), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.get_transfer_run), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
transfer.TransferRun(
@@ -2951,7 +3021,7 @@ async def test_get_transfer_run_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == datatransfer.GetTransferRunRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, transfer.TransferRun)
@@ -2969,6 +3039,11 @@ async def test_get_transfer_run_async(transport: str = "grpc_asyncio"):
assert response.notification_pubsub_topic == "notification_pubsub_topic_value"
+@pytest.mark.asyncio
+async def test_get_transfer_run_async_from_dict():
+ await test_get_transfer_run_async(request_type=dict)
+
+
def test_get_transfer_run_field_headers():
client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),)
@@ -2978,9 +3053,7 @@ def test_get_transfer_run_field_headers():
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._transport.get_transfer_run), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.get_transfer_run), "__call__") as call:
call.return_value = transfer.TransferRun()
client.get_transfer_run(request)
@@ -3007,9 +3080,7 @@ async def test_get_transfer_run_field_headers_async():
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.get_transfer_run), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.get_transfer_run), "__call__") as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
transfer.TransferRun()
)
@@ -3030,9 +3101,7 @@ def test_get_transfer_run_flattened():
client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._transport.get_transfer_run), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.get_transfer_run), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = transfer.TransferRun()
@@ -3066,9 +3135,7 @@ async def test_get_transfer_run_flattened_async():
)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.get_transfer_run), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.get_transfer_run), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = transfer.TransferRun()
@@ -3114,7 +3181,7 @@ def test_delete_transfer_run(
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.delete_transfer_run), "__call__"
+ type(client.transport.delete_transfer_run), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = None
@@ -3136,18 +3203,20 @@ def test_delete_transfer_run_from_dict():
@pytest.mark.asyncio
-async def test_delete_transfer_run_async(transport: str = "grpc_asyncio"):
+async def test_delete_transfer_run_async(
+ transport: str = "grpc_asyncio", request_type=datatransfer.DeleteTransferRunRequest
+):
client = DataTransferServiceAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = datatransfer.DeleteTransferRunRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.delete_transfer_run), "__call__"
+ type(client.transport.delete_transfer_run), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
@@ -3158,12 +3227,17 @@ async def test_delete_transfer_run_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == datatransfer.DeleteTransferRunRequest()
# Establish that the response is the type that we expect.
assert response is None
+@pytest.mark.asyncio
+async def test_delete_transfer_run_async_from_dict():
+ await test_delete_transfer_run_async(request_type=dict)
+
+
def test_delete_transfer_run_field_headers():
client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),)
@@ -3174,7 +3248,7 @@ def test_delete_transfer_run_field_headers():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.delete_transfer_run), "__call__"
+ type(client.transport.delete_transfer_run), "__call__"
) as call:
call.return_value = None
@@ -3203,7 +3277,7 @@ async def test_delete_transfer_run_field_headers_async():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.delete_transfer_run), "__call__"
+ type(client.transport.delete_transfer_run), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
@@ -3224,7 +3298,7 @@ def test_delete_transfer_run_flattened():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.delete_transfer_run), "__call__"
+ type(client.transport.delete_transfer_run), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = None
@@ -3260,7 +3334,7 @@ async def test_delete_transfer_run_flattened_async():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.delete_transfer_run), "__call__"
+ type(client.transport.delete_transfer_run), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = None
@@ -3305,7 +3379,7 @@ def test_list_transfer_runs(
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.list_transfer_runs), "__call__"
+ type(client.transport.list_transfer_runs), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = datatransfer.ListTransferRunsResponse(
@@ -3321,6 +3395,7 @@ def test_list_transfer_runs(
assert args[0] == datatransfer.ListTransferRunsRequest()
# Establish that the response is the type that we expect.
+
assert isinstance(response, pagers.ListTransferRunsPager)
assert response.next_page_token == "next_page_token_value"
@@ -3331,18 +3406,20 @@ def test_list_transfer_runs_from_dict():
@pytest.mark.asyncio
-async def test_list_transfer_runs_async(transport: str = "grpc_asyncio"):
+async def test_list_transfer_runs_async(
+ transport: str = "grpc_asyncio", request_type=datatransfer.ListTransferRunsRequest
+):
client = DataTransferServiceAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = datatransfer.ListTransferRunsRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.list_transfer_runs), "__call__"
+ type(client.transport.list_transfer_runs), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
@@ -3357,7 +3434,7 @@ async def test_list_transfer_runs_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == datatransfer.ListTransferRunsRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, pagers.ListTransferRunsAsyncPager)
@@ -3365,6 +3442,11 @@ async def test_list_transfer_runs_async(transport: str = "grpc_asyncio"):
assert response.next_page_token == "next_page_token_value"
+@pytest.mark.asyncio
+async def test_list_transfer_runs_async_from_dict():
+ await test_list_transfer_runs_async(request_type=dict)
+
+
def test_list_transfer_runs_field_headers():
client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),)
@@ -3375,7 +3457,7 @@ def test_list_transfer_runs_field_headers():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.list_transfer_runs), "__call__"
+ type(client.transport.list_transfer_runs), "__call__"
) as call:
call.return_value = datatransfer.ListTransferRunsResponse()
@@ -3404,7 +3486,7 @@ async def test_list_transfer_runs_field_headers_async():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.list_transfer_runs), "__call__"
+ type(client.transport.list_transfer_runs), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
datatransfer.ListTransferRunsResponse()
@@ -3427,7 +3509,7 @@ def test_list_transfer_runs_flattened():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.list_transfer_runs), "__call__"
+ type(client.transport.list_transfer_runs), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = datatransfer.ListTransferRunsResponse()
@@ -3463,7 +3545,7 @@ async def test_list_transfer_runs_flattened_async():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.list_transfer_runs), "__call__"
+ type(client.transport.list_transfer_runs), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = datatransfer.ListTransferRunsResponse()
@@ -3502,7 +3584,7 @@ def test_list_transfer_runs_pager():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.list_transfer_runs), "__call__"
+ type(client.transport.list_transfer_runs), "__call__"
) as call:
# Set the response to a series of pages.
call.side_effect = (
@@ -3544,7 +3626,7 @@ def test_list_transfer_runs_pages():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.list_transfer_runs), "__call__"
+ type(client.transport.list_transfer_runs), "__call__"
) as call:
# Set the response to a series of pages.
call.side_effect = (
@@ -3580,7 +3662,7 @@ async def test_list_transfer_runs_async_pager():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.list_transfer_runs),
+ type(client.transport.list_transfer_runs),
"__call__",
new_callable=mock.AsyncMock,
) as call:
@@ -3623,7 +3705,7 @@ async def test_list_transfer_runs_async_pages():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.list_transfer_runs),
+ type(client.transport.list_transfer_runs),
"__call__",
new_callable=mock.AsyncMock,
) as call:
@@ -3668,7 +3750,7 @@ def test_list_transfer_logs(
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.list_transfer_logs), "__call__"
+ type(client.transport.list_transfer_logs), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = datatransfer.ListTransferLogsResponse(
@@ -3684,6 +3766,7 @@ def test_list_transfer_logs(
assert args[0] == datatransfer.ListTransferLogsRequest()
# Establish that the response is the type that we expect.
+
assert isinstance(response, pagers.ListTransferLogsPager)
assert response.next_page_token == "next_page_token_value"
@@ -3694,18 +3777,20 @@ def test_list_transfer_logs_from_dict():
@pytest.mark.asyncio
-async def test_list_transfer_logs_async(transport: str = "grpc_asyncio"):
+async def test_list_transfer_logs_async(
+ transport: str = "grpc_asyncio", request_type=datatransfer.ListTransferLogsRequest
+):
client = DataTransferServiceAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = datatransfer.ListTransferLogsRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.list_transfer_logs), "__call__"
+ type(client.transport.list_transfer_logs), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
@@ -3720,7 +3805,7 @@ async def test_list_transfer_logs_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == datatransfer.ListTransferLogsRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, pagers.ListTransferLogsAsyncPager)
@@ -3728,6 +3813,11 @@ async def test_list_transfer_logs_async(transport: str = "grpc_asyncio"):
assert response.next_page_token == "next_page_token_value"
+@pytest.mark.asyncio
+async def test_list_transfer_logs_async_from_dict():
+ await test_list_transfer_logs_async(request_type=dict)
+
+
def test_list_transfer_logs_field_headers():
client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),)
@@ -3738,7 +3828,7 @@ def test_list_transfer_logs_field_headers():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.list_transfer_logs), "__call__"
+ type(client.transport.list_transfer_logs), "__call__"
) as call:
call.return_value = datatransfer.ListTransferLogsResponse()
@@ -3767,7 +3857,7 @@ async def test_list_transfer_logs_field_headers_async():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.list_transfer_logs), "__call__"
+ type(client.transport.list_transfer_logs), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
datatransfer.ListTransferLogsResponse()
@@ -3790,7 +3880,7 @@ def test_list_transfer_logs_flattened():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.list_transfer_logs), "__call__"
+ type(client.transport.list_transfer_logs), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = datatransfer.ListTransferLogsResponse()
@@ -3826,7 +3916,7 @@ async def test_list_transfer_logs_flattened_async():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.list_transfer_logs), "__call__"
+ type(client.transport.list_transfer_logs), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = datatransfer.ListTransferLogsResponse()
@@ -3865,7 +3955,7 @@ def test_list_transfer_logs_pager():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.list_transfer_logs), "__call__"
+ type(client.transport.list_transfer_logs), "__call__"
) as call:
# Set the response to a series of pages.
call.side_effect = (
@@ -3910,7 +4000,7 @@ def test_list_transfer_logs_pages():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.list_transfer_logs), "__call__"
+ type(client.transport.list_transfer_logs), "__call__"
) as call:
# Set the response to a series of pages.
call.side_effect = (
@@ -3949,7 +4039,7 @@ async def test_list_transfer_logs_async_pager():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.list_transfer_logs),
+ type(client.transport.list_transfer_logs),
"__call__",
new_callable=mock.AsyncMock,
) as call:
@@ -3995,7 +4085,7 @@ async def test_list_transfer_logs_async_pages():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.list_transfer_logs),
+ type(client.transport.list_transfer_logs),
"__call__",
new_callable=mock.AsyncMock,
) as call:
@@ -4043,7 +4133,7 @@ def test_check_valid_creds(
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.check_valid_creds), "__call__"
+ type(client.transport.check_valid_creds), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = datatransfer.CheckValidCredsResponse(has_valid_creds=True,)
@@ -4057,6 +4147,7 @@ def test_check_valid_creds(
assert args[0] == datatransfer.CheckValidCredsRequest()
# Establish that the response is the type that we expect.
+
assert isinstance(response, datatransfer.CheckValidCredsResponse)
assert response.has_valid_creds is True
@@ -4067,18 +4158,20 @@ def test_check_valid_creds_from_dict():
@pytest.mark.asyncio
-async def test_check_valid_creds_async(transport: str = "grpc_asyncio"):
+async def test_check_valid_creds_async(
+ transport: str = "grpc_asyncio", request_type=datatransfer.CheckValidCredsRequest
+):
client = DataTransferServiceAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = datatransfer.CheckValidCredsRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.check_valid_creds), "__call__"
+ type(client.transport.check_valid_creds), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
@@ -4091,7 +4184,7 @@ async def test_check_valid_creds_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == datatransfer.CheckValidCredsRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, datatransfer.CheckValidCredsResponse)
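The assertion now compares against a freshly constructed CheckValidCredsRequest() rather than the local request variable, which may have started life as a dict. That holds because proto-plus messages use value equality; a sketch, assuming the constructor accepts a mapping:

from google.cloud.bigquery_datatransfer_v1.types import datatransfer

# Value equality: a request built from an empty mapping equals a
# default-constructed one, so the assertion passes for both request types.
assert datatransfer.CheckValidCredsRequest({}) == datatransfer.CheckValidCredsRequest()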
@@ -4099,6 +4192,11 @@ async def test_check_valid_creds_async(transport: str = "grpc_asyncio"):
assert response.has_valid_creds is True
+@pytest.mark.asyncio
+async def test_check_valid_creds_async_from_dict():
+ await test_check_valid_creds_async(request_type=dict)
+
+
def test_check_valid_creds_field_headers():
client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),)
@@ -4109,7 +4207,7 @@ def test_check_valid_creds_field_headers():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.check_valid_creds), "__call__"
+ type(client.transport.check_valid_creds), "__call__"
) as call:
call.return_value = datatransfer.CheckValidCredsResponse()
@@ -4138,7 +4236,7 @@ async def test_check_valid_creds_field_headers_async():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.check_valid_creds), "__call__"
+ type(client.transport.check_valid_creds), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
datatransfer.CheckValidCredsResponse()
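grpc_helpers_async.FakeUnaryUnaryCall wraps the canned response in an awaitable, so the async client can await the mocked call as it would a real gRPC invocation. A simplified stand-in showing the shape (this is a sketch, not the real helper):

import asyncio

class FakeUnaryUnaryCallSketch:
    # Awaitable that resolves to the response it was constructed with.
    def __init__(self, response):
        self._response = response

    def __await__(self):
        async def _resolve():
            return self._response
        return _resolve().__await__()

async def _demo():
    return await FakeUnaryUnaryCallSketch("response")

assert asyncio.run(_demo()) == "response"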
@@ -4161,7 +4259,7 @@ def test_check_valid_creds_flattened():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.check_valid_creds), "__call__"
+ type(client.transport.check_valid_creds), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = datatransfer.CheckValidCredsResponse()
@@ -4197,7 +4295,7 @@ async def test_check_valid_creds_flattened_async():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.check_valid_creds), "__call__"
+ type(client.transport.check_valid_creds), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = datatransfer.CheckValidCredsResponse()
@@ -4267,7 +4365,7 @@ def test_transport_instance():
credentials=credentials.AnonymousCredentials(),
)
client = DataTransferServiceClient(transport=transport)
- assert client._transport is transport
+ assert client.transport is transport
def test_transport_get_channel():
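Every client._transport access in this file becomes client.transport: the client now exposes its transport through a public read-only property. A sketch of the pattern (class and names illustrative only):

class ClientSketch:
    def __init__(self, transport):
        self._transport = transport  # still stored privately

    @property
    def transport(self):
        # Public, read-only view; assigning to client.transport raises
        # AttributeError, preserving encapsulation.
        return self._transport

client = ClientSketch(transport="grpc-transport")
assert client.transport is client._transport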
@@ -4303,7 +4401,7 @@ def test_transport_adc(transport_class):
def test_transport_grpc_default():
# A client should use the gRPC transport by default.
client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),)
- assert isinstance(client._transport, transports.DataTransferServiceGrpcTransport,)
+ assert isinstance(client.transport, transports.DataTransferServiceGrpcTransport,)
def test_data_transfer_service_base_transport_error():
@@ -4410,7 +4508,7 @@ def test_data_transfer_service_host_no_port():
api_endpoint="bigquerydatatransfer.googleapis.com"
),
)
- assert client._transport._host == "bigquerydatatransfer.googleapis.com:443"
+ assert client.transport._host == "bigquerydatatransfer.googleapis.com:443"
def test_data_transfer_service_host_with_port():
@@ -4420,7 +4518,7 @@ def test_data_transfer_service_host_with_port():
api_endpoint="bigquerydatatransfer.googleapis.com:8000"
),
)
- assert client._transport._host == "bigquerydatatransfer.googleapis.com:8000"
+ assert client.transport._host == "bigquerydatatransfer.googleapis.com:8000"
def test_data_transfer_service_grpc_transport_channel():
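The two host tests assert that a default port of 443 is appended only when the configured endpoint carries no explicit port. A sketch of that defaulting rule as the assertions imply it (the helper name is hypothetical):

def with_default_port(host, default_port=443):
    # Append the default only when the endpoint has no explicit port.
    return host if ":" in host else "{}:{}".format(host, default_port)

assert with_default_port("bigquerydatatransfer.googleapis.com") == (
    "bigquerydatatransfer.googleapis.com:443"
)
assert with_default_port("bigquerydatatransfer.googleapis.com:8000") == (
    "bigquerydatatransfer.googleapis.com:8000"
)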
@@ -4432,6 +4530,7 @@ def test_data_transfer_service_grpc_transport_channel():
)
assert transport.grpc_channel == channel
assert transport._host == "squid.clam.whelk:443"
+ assert transport._ssl_channel_credentials is None
def test_data_transfer_service_grpc_asyncio_transport_channel():
@@ -4443,6 +4542,7 @@ def test_data_transfer_service_grpc_asyncio_transport_channel():
)
assert transport.grpc_channel == channel
assert transport._host == "squid.clam.whelk:443"
+ assert transport._ssl_channel_credentials is None
@pytest.mark.parametrize(
@@ -4490,6 +4590,7 @@ def test_data_transfer_service_transport_channel_mtls_with_client_cert_source(
quota_project_id=None,
)
assert transport.grpc_channel == mock_grpc_channel
+ assert transport._ssl_channel_credentials == mock_ssl_cred
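The new _ssl_channel_credentials assertions check that the transport remembers the SSL credentials it was created with: None for plain channels, the client-certificate credentials under mTLS. A minimal sketch of obtaining such a credentials object via grpc's public API (what the transport stores is assumed from these assertions):

import grpc

plain = None                           # what a non-mTLS transport would store
mtls = grpc.ssl_channel_credentials()  # client-side TLS creds, system roots
assert isinstance(mtls, grpc.ChannelCredentials)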
@pytest.mark.parametrize(
@@ -4532,9 +4633,57 @@ def test_data_transfer_service_transport_channel_mtls_with_adc(transport_class):
assert transport.grpc_channel == mock_grpc_channel
-def test_transfer_config_path():
+def test_data_source_path():
project = "squid"
- transfer_config = "clam"
+ data_source = "clam"
+
+ expected = "projects/{project}/dataSources/{data_source}".format(
+ project=project, data_source=data_source,
+ )
+ actual = DataTransferServiceClient.data_source_path(project, data_source)
+ assert expected == actual
+
+
+def test_parse_data_source_path():
+ expected = {
+ "project": "whelk",
+ "data_source": "octopus",
+ }
+ path = DataTransferServiceClient.data_source_path(**expected)
+
+ # Check that the path construction is reversible.
+ actual = DataTransferServiceClient.parse_data_source_path(path)
+ assert expected == actual
+
+
+def test_run_path():
+ project = "oyster"
+ transfer_config = "nudibranch"
+ run = "cuttlefish"
+
+ expected = "projects/{project}/transferConfigs/{transfer_config}/runs/{run}".format(
+ project=project, transfer_config=transfer_config, run=run,
+ )
+ actual = DataTransferServiceClient.run_path(project, transfer_config, run)
+ assert expected == actual
+
+
+def test_parse_run_path():
+ expected = {
+ "project": "mussel",
+ "transfer_config": "winkle",
+ "run": "nautilus",
+ }
+ path = DataTransferServiceClient.run_path(**expected)
+
+ # Check that the path construction is reversible.
+ actual = DataTransferServiceClient.parse_run_path(path)
+ assert expected == actual
+
+
+def test_transfer_config_path():
+ project = "scallop"
+ transfer_config = "abalone"
expected = "projects/{project}/transferConfigs/{transfer_config}".format(
project=project, transfer_config=transfer_config,
@@ -4545,8 +4694,8 @@ def test_transfer_config_path():
def test_parse_transfer_config_path():
expected = {
- "project": "whelk",
- "transfer_config": "octopus",
+ "project": "squid",
+ "transfer_config": "clam",
}
path = DataTransferServiceClient.transfer_config_path(**expected)
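All of the *_path/parse_*_path pairs exercised here follow one template pattern: str.format for construction and a named-group regex for the reverse parse. A self-contained sketch using the dataSources template (the regex is an assumption modeled on the expected strings in these tests, not the library's exact implementation):

import re

def data_source_path(project, data_source):
    return "projects/{project}/dataSources/{data_source}".format(
        project=project, data_source=data_source,
    )

def parse_data_source_path(path):
    m = re.match(
        r"^projects/(?P<project>.+?)/dataSources/(?P<data_source>.+?)$", path
    )
    return m.groupdict() if m else {}

# Round trip, mirroring the reversibility checks in these tests.
assert parse_data_source_path(data_source_path("squid", "clam")) == {
    "project": "squid",
    "data_source": "clam",
}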
@@ -4555,6 +4704,107 @@ def test_parse_transfer_config_path():
assert expected == actual
+def test_common_billing_account_path():
+ billing_account = "whelk"
+
+ expected = "billingAccounts/{billing_account}".format(
+ billing_account=billing_account,
+ )
+ actual = DataTransferServiceClient.common_billing_account_path(billing_account)
+ assert expected == actual
+
+
+def test_parse_common_billing_account_path():
+ expected = {
+ "billing_account": "octopus",
+ }
+ path = DataTransferServiceClient.common_billing_account_path(**expected)
+
+ # Check that the path construction is reversible.
+ actual = DataTransferServiceClient.parse_common_billing_account_path(path)
+ assert expected == actual
+
+
+def test_common_folder_path():
+ folder = "oyster"
+
+ expected = "folders/{folder}".format(folder=folder,)
+ actual = DataTransferServiceClient.common_folder_path(folder)
+ assert expected == actual
+
+
+def test_parse_common_folder_path():
+ expected = {
+ "folder": "nudibranch",
+ }
+ path = DataTransferServiceClient.common_folder_path(**expected)
+
+ # Check that the path construction is reversible.
+ actual = DataTransferServiceClient.parse_common_folder_path(path)
+ assert expected == actual
+
+
+def test_common_organization_path():
+ organization = "cuttlefish"
+
+ expected = "organizations/{organization}".format(organization=organization,)
+ actual = DataTransferServiceClient.common_organization_path(organization)
+ assert expected == actual
+
+
+def test_parse_common_organization_path():
+ expected = {
+ "organization": "mussel",
+ }
+ path = DataTransferServiceClient.common_organization_path(**expected)
+
+ # Check that the path construction is reversible.
+ actual = DataTransferServiceClient.parse_common_organization_path(path)
+ assert expected == actual
+
+
+def test_common_project_path():
+ project = "winkle"
+
+ expected = "projects/{project}".format(project=project,)
+ actual = DataTransferServiceClient.common_project_path(project)
+ assert expected == actual
+
+
+def test_parse_common_project_path():
+ expected = {
+ "project": "nautilus",
+ }
+ path = DataTransferServiceClient.common_project_path(**expected)
+
+ # Check that the path construction is reversible.
+ actual = DataTransferServiceClient.parse_common_project_path(path)
+ assert expected == actual
+
+
+def test_common_location_path():
+ project = "scallop"
+ location = "abalone"
+
+ expected = "projects/{project}/locations/{location}".format(
+ project=project, location=location,
+ )
+ actual = DataTransferServiceClient.common_location_path(project, location)
+ assert expected == actual
+
+
+def test_parse_common_location_path():
+ expected = {
+ "project": "squid",
+ "location": "clam",
+ }
+ path = DataTransferServiceClient.common_location_path(**expected)
+
+ # Check that the path construction is reversible.
+ actual = DataTransferServiceClient.parse_common_location_path(path)
+ assert expected == actual
+
+
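Usage note: the common_* helpers are classmethods, so resource names can be built or parsed without instantiating a client (and therefore without credentials). For example, assuming the package is installed (the project id is made up):

from google.cloud import bigquery_datatransfer_v1 as bqdt

name = bqdt.DataTransferServiceClient.common_project_path("my-project")
assert name == "projects/my-project"
assert bqdt.DataTransferServiceClient.parse_common_project_path(name) == {
    "project": "my-project",
}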
def test_client_withDEFAULT_CLIENT_INFO():
client_info = gapic_v1.client_info.ClientInfo()