diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml
index ae6c57fa..7d98291c 100644
--- a/.github/.OwlBot.lock.yaml
+++ b/.github/.OwlBot.lock.yaml
@@ -1,3 +1,3 @@
docker:
image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest
- digest: sha256:82b12321da4446a73cb11bcb6812fbec8c105abda3946d46e6394e5fbfb64c0f
+ digest: sha256:58f73ba196b5414782605236dd0712a73541b44ff2ff4d3a36ec41092dd6fa5b
diff --git a/.kokoro/samples/lint/common.cfg b/.kokoro/samples/lint/common.cfg
index 2072342e..0c655d67 100644
--- a/.kokoro/samples/lint/common.cfg
+++ b/.kokoro/samples/lint/common.cfg
@@ -31,4 +31,4 @@ gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples"
gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
# Use the trampoline script to run in docker.
-build_file: "python-bigquery-datatransfer/.kokoro/trampoline.sh"
\ No newline at end of file
+build_file: "python-bigquery-datatransfer/.kokoro/trampoline_v2.sh"
\ No newline at end of file
diff --git a/.kokoro/samples/python3.10/common.cfg b/.kokoro/samples/python3.10/common.cfg
new file mode 100644
index 00000000..397d8dc8
--- /dev/null
+++ b/.kokoro/samples/python3.10/common.cfg
@@ -0,0 +1,40 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Build logs will be here
+action {
+ define_artifacts {
+ regex: "**/*sponge_log.xml"
+ }
+}
+
+# Specify which tests to run
+env_vars: {
+ key: "RUN_TESTS_SESSION"
+ value: "py-3.10"
+}
+
+# Declare build specific Cloud project.
+env_vars: {
+ key: "BUILD_SPECIFIC_GCLOUD_PROJECT"
+ value: "python-docs-samples-tests-310"
+}
+
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-bigquery-datatransfer/.kokoro/test-samples.sh"
+}
+
+# Configure the docker image for kokoro-trampoline.
+env_vars: {
+ key: "TRAMPOLINE_IMAGE"
+ value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker"
+}
+
+# Download secrets for samples
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples"
+
+# Download trampoline resources.
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
+
+# Use the trampoline script to run in docker.
+build_file: "python-bigquery-datatransfer/.kokoro/trampoline_v2.sh"
\ No newline at end of file
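Note: the new `common.cfg` above drives the 3.10 samples session entirely through environment variables. A simplified, hypothetical Python sketch of how a test harness could consume the values declared above (variable names match the config; the actual Kokoro scripts and samples noxfile are more involved):

```python
import os

# Hypothetical illustration only: read the Kokoro-provided settings from common.cfg.
session = os.environ.get("RUN_TESTS_SESSION", "py-3.10")       # which nox session to run
project = os.environ.get("BUILD_SPECIFIC_GCLOUD_PROJECT", "")  # per-version test project
env = {"GOOGLE_CLOUD_PROJECT": project} if project else {}
print(f"Would run nox session {session!r} with env {env}")
```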
diff --git a/.kokoro/samples/python3.10/continuous.cfg b/.kokoro/samples/python3.10/continuous.cfg
new file mode 100644
index 00000000..a1c8d975
--- /dev/null
+++ b/.kokoro/samples/python3.10/continuous.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
\ No newline at end of file
diff --git a/.kokoro/samples/python3.10/periodic-head.cfg b/.kokoro/samples/python3.10/periodic-head.cfg
new file mode 100644
index 00000000..fc517b6c
--- /dev/null
+++ b/.kokoro/samples/python3.10/periodic-head.cfg
@@ -0,0 +1,11 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
+
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-bigquery-datatransfer/.kokoro/test-samples-against-head.sh"
+}
diff --git a/.kokoro/samples/python3.10/periodic.cfg b/.kokoro/samples/python3.10/periodic.cfg
new file mode 100644
index 00000000..71cd1e59
--- /dev/null
+++ b/.kokoro/samples/python3.10/periodic.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "False"
+}
diff --git a/.kokoro/samples/python3.10/presubmit.cfg b/.kokoro/samples/python3.10/presubmit.cfg
new file mode 100644
index 00000000..a1c8d975
--- /dev/null
+++ b/.kokoro/samples/python3.10/presubmit.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
\ No newline at end of file
diff --git a/.kokoro/samples/python3.6/common.cfg b/.kokoro/samples/python3.6/common.cfg
index d92ddf8d..7d084a06 100644
--- a/.kokoro/samples/python3.6/common.cfg
+++ b/.kokoro/samples/python3.6/common.cfg
@@ -37,4 +37,4 @@ gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples"
gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
# Use the trampoline script to run in docker.
-build_file: "python-bigquery-datatransfer/.kokoro/trampoline.sh"
\ No newline at end of file
+build_file: "python-bigquery-datatransfer/.kokoro/trampoline_v2.sh"
\ No newline at end of file
diff --git a/.kokoro/samples/python3.6/periodic.cfg b/.kokoro/samples/python3.6/periodic.cfg
index 50fec964..71cd1e59 100644
--- a/.kokoro/samples/python3.6/periodic.cfg
+++ b/.kokoro/samples/python3.6/periodic.cfg
@@ -3,4 +3,4 @@
env_vars: {
key: "INSTALL_LIBRARY_FROM_SOURCE"
value: "False"
-}
\ No newline at end of file
+}
diff --git a/.kokoro/samples/python3.7/common.cfg b/.kokoro/samples/python3.7/common.cfg
index 8c221a6e..26ad2182 100644
--- a/.kokoro/samples/python3.7/common.cfg
+++ b/.kokoro/samples/python3.7/common.cfg
@@ -37,4 +37,4 @@ gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples"
gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
# Use the trampoline script to run in docker.
-build_file: "python-bigquery-datatransfer/.kokoro/trampoline.sh"
\ No newline at end of file
+build_file: "python-bigquery-datatransfer/.kokoro/trampoline_v2.sh"
\ No newline at end of file
diff --git a/.kokoro/samples/python3.7/periodic.cfg b/.kokoro/samples/python3.7/periodic.cfg
index 50fec964..71cd1e59 100644
--- a/.kokoro/samples/python3.7/periodic.cfg
+++ b/.kokoro/samples/python3.7/periodic.cfg
@@ -3,4 +3,4 @@
env_vars: {
key: "INSTALL_LIBRARY_FROM_SOURCE"
value: "False"
-}
\ No newline at end of file
+}
diff --git a/.kokoro/samples/python3.8/common.cfg b/.kokoro/samples/python3.8/common.cfg
index fa5c7d2f..08a5cfd9 100644
--- a/.kokoro/samples/python3.8/common.cfg
+++ b/.kokoro/samples/python3.8/common.cfg
@@ -37,4 +37,4 @@ gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples"
gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
# Use the trampoline script to run in docker.
-build_file: "python-bigquery-datatransfer/.kokoro/trampoline.sh"
\ No newline at end of file
+build_file: "python-bigquery-datatransfer/.kokoro/trampoline_v2.sh"
\ No newline at end of file
diff --git a/.kokoro/samples/python3.8/periodic.cfg b/.kokoro/samples/python3.8/periodic.cfg
index 50fec964..71cd1e59 100644
--- a/.kokoro/samples/python3.8/periodic.cfg
+++ b/.kokoro/samples/python3.8/periodic.cfg
@@ -3,4 +3,4 @@
env_vars: {
key: "INSTALL_LIBRARY_FROM_SOURCE"
value: "False"
-}
\ No newline at end of file
+}
diff --git a/.kokoro/samples/python3.9/common.cfg b/.kokoro/samples/python3.9/common.cfg
index acf30226..8fabe7ba 100644
--- a/.kokoro/samples/python3.9/common.cfg
+++ b/.kokoro/samples/python3.9/common.cfg
@@ -37,4 +37,4 @@ gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples"
gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
# Use the trampoline script to run in docker.
-build_file: "python-bigquery-datatransfer/.kokoro/trampoline.sh"
\ No newline at end of file
+build_file: "python-bigquery-datatransfer/.kokoro/trampoline_v2.sh"
\ No newline at end of file
diff --git a/.kokoro/samples/python3.9/periodic.cfg b/.kokoro/samples/python3.9/periodic.cfg
index 50fec964..71cd1e59 100644
--- a/.kokoro/samples/python3.9/periodic.cfg
+++ b/.kokoro/samples/python3.9/periodic.cfg
@@ -3,4 +3,4 @@
env_vars: {
key: "INSTALL_LIBRARY_FROM_SOURCE"
value: "False"
-}
\ No newline at end of file
+}
diff --git a/.kokoro/test-samples-against-head.sh b/.kokoro/test-samples-against-head.sh
index e54dc4b5..ba3a707b 100755
--- a/.kokoro/test-samples-against-head.sh
+++ b/.kokoro/test-samples-against-head.sh
@@ -23,6 +23,4 @@ set -eo pipefail
# Enables `**` to include files nested inside sub-folders
shopt -s globstar
-cd github/python-bigquery-datatransfer
-
exec .kokoro/test-samples-impl.sh
diff --git a/.kokoro/test-samples.sh b/.kokoro/test-samples.sh
index a10f20a2..11c042d3 100755
--- a/.kokoro/test-samples.sh
+++ b/.kokoro/test-samples.sh
@@ -24,8 +24,6 @@ set -eo pipefail
# Enables `**` to include files nested inside sub-folders
shopt -s globstar
-cd github/python-bigquery-datatransfer
-
# Run periodic samples tests at latest release
if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then
# preserving the test runner implementation.
diff --git a/.trampolinerc b/.trampolinerc
index 383b6ec8..0eee72ab 100644
--- a/.trampolinerc
+++ b/.trampolinerc
@@ -16,15 +16,26 @@
# Add required env vars here.
required_envvars+=(
- "STAGING_BUCKET"
- "V2_STAGING_BUCKET"
)
# Add env vars which are passed down into the container here.
pass_down_envvars+=(
+ "NOX_SESSION"
+ ###############
+ # Docs builds
+ ###############
"STAGING_BUCKET"
"V2_STAGING_BUCKET"
- "NOX_SESSION"
+ ##################
+ # Samples builds
+ ##################
+ "INSTALL_LIBRARY_FROM_SOURCE"
+ "RUN_TESTS_SESSION"
+ "BUILD_SPECIFIC_GCLOUD_PROJECT"
+ # Target directories.
+ "RUN_TESTS_DIRS"
+ # The nox session to run.
+ "RUN_TESTS_SESSION"
)
# Prevent unintentional override on the default image.
diff --git a/CHANGELOG.md b/CHANGELOG.md
index dc07d61b..bb34b440 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -4,6 +4,13 @@
[1]: https://pypi.org/project/google-cloud-bigquery-datatransfer/#history
+## [3.4.0](https://www.github.com/googleapis/python-bigquery-datatransfer/compare/v3.3.4...v3.4.0) (2021-10-08)
+
+
+### Features
+
+* add context manager support in client ([#234](https://www.github.com/googleapis/python-bigquery-datatransfer/issues/234)) ([cba3dad](https://www.github.com/googleapis/python-bigquery-datatransfer/commit/cba3dad55db8a00f95503b9c94d4ca21ed462a4f))
+
### [3.3.4](https://www.github.com/googleapis/python-bigquery-datatransfer/compare/v3.3.3...v3.3.4) (2021-10-04)
diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst
index c098a7b5..616a78ce 100644
--- a/CONTRIBUTING.rst
+++ b/CONTRIBUTING.rst
@@ -22,7 +22,7 @@ In order to add a feature:
documentation.
- The feature must work fully on the following CPython versions:
- 3.6, 3.7, 3.8 and 3.9 on both UNIX and Windows.
+ 3.6, 3.7, 3.8, 3.9 and 3.10 on both UNIX and Windows.
- The feature must not add unnecessary dependencies (where
"unnecessary" is of course subjective, but new dependencies should
@@ -72,7 +72,7 @@ We use `nox `__ to instrument our tests.
- To run a single unit test::
- $ nox -s unit-3.9 -- -k <name of test>
+ $ nox -s unit-3.10 -- -k <name of test>
.. note::
@@ -225,11 +225,13 @@ We support:
- `Python 3.7`_
- `Python 3.8`_
- `Python 3.9`_
+- `Python 3.10`_
.. _Python 3.6: https://docs.python.org/3.6/
.. _Python 3.7: https://docs.python.org/3.7/
.. _Python 3.8: https://docs.python.org/3.8/
.. _Python 3.9: https://docs.python.org/3.9/
+.. _Python 3.10: https://docs.python.org/3.10/
Supported versions can be found in our ``noxfile.py`` `config`_.
diff --git a/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/async_client.py b/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/async_client.py
index fc73542b..4a9b81a5 100644
--- a/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/async_client.py
+++ b/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/async_client.py
@@ -1403,6 +1403,12 @@ async def check_valid_creds(
# Done; return the response.
return response
+ async def __aenter__(self):
+ return self
+
+ async def __aexit__(self, exc_type, exc, tb):
+ await self.transport.close()
+
try:
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
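With `__aenter__`/`__aexit__` added above, the async client can now be used as an async context manager so its transport is closed deterministically. A minimal usage sketch (assumes application default credentials are available and `parent` is a valid resource name such as `projects/{project_id}`):

```python
from google.cloud import bigquery_datatransfer_v1


async def list_sources(parent: str) -> None:
    # Exiting the block awaits transport.close() via __aexit__.
    async with bigquery_datatransfer_v1.DataTransferServiceAsyncClient() as client:
        pager = await client.list_data_sources(parent=parent)
        async for source in pager:
            print(source.data_source_id)
```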
diff --git a/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/client.py b/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/client.py
index 3f86cb3a..04a617c4 100644
--- a/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/client.py
+++ b/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/client.py
@@ -388,10 +388,7 @@ def __init__(
client_cert_source_for_mtls=client_cert_source_func,
quota_project_id=client_options.quota_project_id,
client_info=client_info,
- always_use_jwt_access=(
- Transport == type(self).get_transport_class("grpc")
- or Transport == type(self).get_transport_class("grpc_asyncio")
- ),
+ always_use_jwt_access=True,
)
def get_data_source(
@@ -1519,6 +1516,19 @@ def check_valid_creds(
# Done; return the response.
return response
+ def __enter__(self):
+ return self
+
+ def __exit__(self, type, value, traceback):
+ """Releases underlying transport's resources.
+
+ .. warning::
+ ONLY use as a context manager if the transport is NOT shared
+ with other clients! Exiting the with block will CLOSE the transport
+ and may cause errors in other clients!
+ """
+ self.transport.close()
+
try:
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
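The synchronous client gains the same support via `__enter__`/`__exit__`. Per the warning in the docstring above, only use the client as a context manager when its transport is not shared with other clients, since exiting the block closes the transport. A minimal usage sketch (assumes application default credentials; `project_id` is a placeholder):

```python
from google.cloud import bigquery_datatransfer_v1

project_id = "your-project-id"  # placeholder

# Exiting the block calls transport.close(); avoid this pattern if the
# transport is shared with another client instance.
with bigquery_datatransfer_v1.DataTransferServiceClient() as client:
    parent = client.common_project_path(project_id)
    for config in client.list_transfer_configs(parent=parent):
        print(config.display_name)
```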
diff --git a/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/base.py b/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/base.py
index b74eab8e..1e9a77f3 100644
--- a/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/base.py
+++ b/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/base.py
@@ -328,6 +328,15 @@ def _prep_wrapped_messages(self, client_info):
),
}
+ def close(self):
+ """Closes resources associated with the transport.
+
+ .. warning::
+ Only call this method if the transport is NOT shared
+ with other clients - this may cause errors in other clients!
+ """
+ raise NotImplementedError()
+
@property
def get_data_source(
self,
diff --git a/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/grpc.py b/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/grpc.py
index fb8ae0b5..a18df8a2 100644
--- a/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/grpc.py
+++ b/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/grpc.py
@@ -631,5 +631,8 @@ def check_valid_creds(
)
return self._stubs["check_valid_creds"]
+ def close(self):
+ self.grpc_channel.close()
+
__all__ = ("DataTransferServiceGrpcTransport",)
diff --git a/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/grpc_asyncio.py b/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/grpc_asyncio.py
index 65a51fba..fdff9e94 100644
--- a/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/grpc_asyncio.py
+++ b/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/grpc_asyncio.py
@@ -650,5 +650,8 @@ def check_valid_creds(
)
return self._stubs["check_valid_creds"]
+ def close(self):
+ return self.grpc_channel.close()
+
__all__ = ("DataTransferServiceGrpcAsyncIOTransport",)
diff --git a/google/cloud/bigquery_datatransfer_v1/types/datatransfer.py b/google/cloud/bigquery_datatransfer_v1/types/datatransfer.py
index 0e043788..656fd0a5 100644
--- a/google/cloud/bigquery_datatransfer_v1/types/datatransfer.py
+++ b/google/cloud/bigquery_datatransfer_v1/types/datatransfer.py
@@ -236,6 +236,7 @@ class DataRefreshType(proto.Enum):
class GetDataSourceRequest(proto.Message):
r"""A request to get data source info.
+
Attributes:
name (str):
Required. The field will contain name of the resource
@@ -275,6 +276,7 @@ class ListDataSourcesRequest(proto.Message):
class ListDataSourcesResponse(proto.Message):
r"""Returns list of supported data sources and their metadata.
+
Attributes:
data_sources (Sequence[google.cloud.bigquery_datatransfer_v1.types.DataSource]):
List of supported data sources and their
@@ -419,6 +421,7 @@ class UpdateTransferConfigRequest(proto.Message):
class GetTransferConfigRequest(proto.Message):
r"""A request to get data transfer information.
+
Attributes:
name (str):
Required. The field will contain name of the resource
@@ -447,6 +450,7 @@ class DeleteTransferConfigRequest(proto.Message):
class GetTransferRunRequest(proto.Message):
r"""A request to get data transfer run information.
+
Attributes:
name (str):
Required. The field will contain name of the resource
@@ -461,6 +465,7 @@ class GetTransferRunRequest(proto.Message):
class DeleteTransferRunRequest(proto.Message):
r"""A request to delete data transfer run information.
+
Attributes:
name (str):
Required. The field will contain name of the resource
@@ -504,6 +509,7 @@ class ListTransferConfigsRequest(proto.Message):
class ListTransferConfigsResponse(proto.Message):
r"""The returned list of pipelines in the project.
+
Attributes:
transfer_configs (Sequence[google.cloud.bigquery_datatransfer_v1.types.TransferConfig]):
Output only. The stored pipeline transfer
@@ -568,6 +574,7 @@ class RunAttempt(proto.Enum):
class ListTransferRunsResponse(proto.Message):
r"""The returned list of pipelines in the project.
+
Attributes:
transfer_runs (Sequence[google.cloud.bigquery_datatransfer_v1.types.TransferRun]):
Output only. The stored pipeline transfer
@@ -624,6 +631,7 @@ class ListTransferLogsRequest(proto.Message):
class ListTransferLogsResponse(proto.Message):
r"""The returned list transfer run messages.
+
Attributes:
transfer_messages (Sequence[google.cloud.bigquery_datatransfer_v1.types.TransferMessage]):
Output only. The stored pipeline transfer
@@ -678,6 +686,7 @@ class CheckValidCredsResponse(proto.Message):
class ScheduleTransferRunsRequest(proto.Message):
r"""A request to schedule transfer runs for a time range.
+
Attributes:
parent (str):
Required. Transfer configuration name in the form:
@@ -698,6 +707,7 @@ class ScheduleTransferRunsRequest(proto.Message):
class ScheduleTransferRunsResponse(proto.Message):
r"""A response to schedule transfer runs for a time range.
+
Attributes:
runs (Sequence[google.cloud.bigquery_datatransfer_v1.types.TransferRun]):
The transfer runs that were scheduled.
@@ -708,6 +718,7 @@ class ScheduleTransferRunsResponse(proto.Message):
class StartManualTransferRunsRequest(proto.Message):
r"""A request to start manual transfer runs.
+
Attributes:
parent (str):
Transfer configuration name in the form:
@@ -759,6 +770,7 @@ class TimeRange(proto.Message):
class StartManualTransferRunsResponse(proto.Message):
r"""A response to start manual transfer runs.
+
Attributes:
runs (Sequence[google.cloud.bigquery_datatransfer_v1.types.TransferRun]):
The transfer runs that were created.
diff --git a/google/cloud/bigquery_datatransfer_v1/types/transfer.py b/google/cloud/bigquery_datatransfer_v1/types/transfer.py
index 76a5856f..2feaf5ae 100644
--- a/google/cloud/bigquery_datatransfer_v1/types/transfer.py
+++ b/google/cloud/bigquery_datatransfer_v1/types/transfer.py
@@ -67,6 +67,7 @@ class EmailPreferences(proto.Message):
class ScheduleOptions(proto.Message):
r"""Options customizing the data transfer schedule.
+
Attributes:
disable_auto_scheduling (bool):
If true, automatic scheduling of data
@@ -205,6 +206,7 @@ class TransferConfig(proto.Message):
class TransferRun(proto.Message):
r"""Represents a data transfer run.
+
Attributes:
name (str):
The resource name of the transfer run. Transfer run names
diff --git a/noxfile.py b/noxfile.py
index 93ea9b14..f885c682 100644
--- a/noxfile.py
+++ b/noxfile.py
@@ -29,7 +29,7 @@
DEFAULT_PYTHON_VERSION = "3.8"
SYSTEM_TEST_PYTHON_VERSIONS = ["3.8"]
-UNIT_TEST_PYTHON_VERSIONS = ["3.6", "3.7", "3.8", "3.9"]
+UNIT_TEST_PYTHON_VERSIONS = ["3.6", "3.7", "3.8", "3.9", "3.10"]
CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute()
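For context, `UNIT_TEST_PYTHON_VERSIONS` feeds the session decorator in the shared noxfile, so adding "3.10" is enough to expose a `unit-3.10` session (as referenced in the CONTRIBUTING change above). A simplified sketch of that pattern; the repository's actual `unit` session installs more dependencies and coverage options:

```python
import nox

UNIT_TEST_PYTHON_VERSIONS = ["3.6", "3.7", "3.8", "3.9", "3.10"]


@nox.session(python=UNIT_TEST_PYTHON_VERSIONS)
def unit(session):
    """Run the unit test suite for each listed interpreter (simplified)."""
    session.install("pytest")
    session.install("-e", ".")
    session.run("pytest", "tests/unit")
```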
diff --git a/samples/snippets/noxfile.py b/samples/snippets/noxfile.py
index 1fd8956f..93a9122c 100644
--- a/samples/snippets/noxfile.py
+++ b/samples/snippets/noxfile.py
@@ -87,7 +87,7 @@ def get_pytest_env_vars() -> Dict[str, str]:
# DO NOT EDIT - automatically generated.
# All versions used to test samples.
-ALL_VERSIONS = ["3.6", "3.7", "3.8", "3.9"]
+ALL_VERSIONS = ["3.6", "3.7", "3.8", "3.9", "3.10"]
# Any default versions that should be ignored.
IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"]
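In the samples noxfile, the versions actually tested are typically derived by filtering `ALL_VERSIONS` against `IGNORED_VERSIONS`, so 3.10 is picked up automatically unless a sample opts out in its config. A minimal sketch of that filtering (the ignored list shown here is only an example):

```python
ALL_VERSIONS = ["3.6", "3.7", "3.8", "3.9", "3.10"]
IGNORED_VERSIONS = ["3.6"]  # example: a sample config could opt out of 3.6

# Versions that will actually get a test session.
TESTED_VERSIONS = sorted(v for v in ALL_VERSIONS if v not in IGNORED_VERSIONS)
print(TESTED_VERSIONS)  # ['3.10', '3.7', '3.8', '3.9'] (lexicographic sort)
```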
diff --git a/setup.py b/setup.py
index 2b011f03..ded80ba3 100644
--- a/setup.py
+++ b/setup.py
@@ -22,7 +22,7 @@
name = "google-cloud-bigquery-datatransfer"
description = "BigQuery Data Transfer API client library"
-version = "3.3.4"
+version = "3.4.0"
# Should be one of:
# 'Development Status :: 3 - Alpha'
# 'Development Status :: 4 - Beta'
diff --git a/tests/unit/gapic/bigquery_datatransfer_v1/test_data_transfer_service.py b/tests/unit/gapic/bigquery_datatransfer_v1/test_data_transfer_service.py
index 6328d1f8..8fa41e57 100644
--- a/tests/unit/gapic/bigquery_datatransfer_v1/test_data_transfer_service.py
+++ b/tests/unit/gapic/bigquery_datatransfer_v1/test_data_transfer_service.py
@@ -29,6 +29,7 @@
from google.api_core import gapic_v1
from google.api_core import grpc_helpers
from google.api_core import grpc_helpers_async
+from google.api_core import path_template
from google.auth import credentials as ga_credentials
from google.auth.exceptions import MutualTLSChannelError
from google.cloud.bigquery_datatransfer_v1.services.data_transfer_service import (
@@ -4589,6 +4590,9 @@ def test_data_transfer_service_base_transport():
with pytest.raises(NotImplementedError):
getattr(transport, method)(request=object())
+ with pytest.raises(NotImplementedError):
+ transport.close()
+
@requires_google_auth_gte_1_25_0
def test_data_transfer_service_base_transport_with_credentials_file():
@@ -5117,3 +5121,49 @@ def test_client_withDEFAULT_CLIENT_INFO():
credentials=ga_credentials.AnonymousCredentials(), client_info=client_info,
)
prep.assert_called_once_with(client_info)
+
+
+@pytest.mark.asyncio
+async def test_transport_close_async():
+ client = DataTransferServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(), transport="grpc_asyncio",
+ )
+ with mock.patch.object(
+ type(getattr(client.transport, "grpc_channel")), "close"
+ ) as close:
+ async with client:
+ close.assert_not_called()
+ close.assert_called_once()
+
+
+def test_transport_close():
+ transports = {
+ "grpc": "_grpc_channel",
+ }
+
+ for transport, close_name in transports.items():
+ client = DataTransferServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(), transport=transport
+ )
+ with mock.patch.object(
+ type(getattr(client.transport, close_name)), "close"
+ ) as close:
+ with client:
+ close.assert_not_called()
+ close.assert_called_once()
+
+
+def test_client_ctx():
+ transports = [
+ "grpc",
+ ]
+ for transport in transports:
+ client = DataTransferServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(), transport=transport
+ )
+ # Test client calls underlying transport.
+ with mock.patch.object(type(client.transport), "close") as close:
+ close.assert_not_called()
+ with client:
+ pass
+ close.assert_called()